Dataset columns (name, type, and min/max string length or number of distinct classes):
  commit        stringlengths   40 / 40
  old_file      stringlengths   4 / 150
  new_file      stringlengths   4 / 150
  old_contents  stringlengths   0 / 3.26k
  new_contents  stringlengths   1 / 4.43k
  subject       stringlengths   15 / 501
  message       stringlengths   15 / 4.06k
  lang          stringclasses   4 values
  license       stringclasses   13 values
  repos         stringlengths   5 / 91.5k
  diff          stringlengths   0 / 4.35k
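If this dump corresponds to a published Hugging Face dataset (the stringlengths/stringclasses summary above follows that convention), a minimal sketch of reading one record could look like the following; the dataset path "user/commit-dataset" is a placeholder assumption, not taken from this dump:

from datasets import load_dataset  # Hugging Face datasets library

# Placeholder dataset path, used for illustration only.
ds = load_dataset("user/commit-dataset", split="train")

row = ds[0]
print(row["commit"])                      # 40-character commit hash
print(row["old_file"], row["new_file"])   # file path before and after the change
print(row["subject"])                     # short commit subject line
print(row["diff"])                        # unified diff between old_contents and new_contents

The records below follow this schema, one field per line, with multi-line fields (file contents and diffs) stored as single flattened strings.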
c88b060e216b494d893a1cf50de4e85b276740c1
setup.py
setup.py
#!/usr/bin/env python import os from setuptools import setup, find_packages from eodatasets import __version__ as version # Append TeamCity build number if it gives us one. if 'BUILD_NUMBER' in os.environ and version.endswith('b'): version += '' + os.environ['BUILD_NUMBER'] setup( name="eodatasets", version=version, packages=find_packages(), install_requires=[ 'click', 'python-dateutil', 'gdal', 'numpy', 'pathlib', 'pyyaml', ], entry_points=''' [console_scripts] eod-package=eodatasets.scripts.package:cli eod-generate-browse=eodatasets.scripts.generatebrowse:cli ''', )
#!/usr/bin/env python import os from setuptools import setup, find_packages from eodatasets import __version__ as version # Append TeamCity build number if it gives us one. if 'BUILD_NUMBER' in os.environ and version.endswith('b'): version += '' + os.environ['BUILD_NUMBER'] setup( name="eodatasets", version=version, packages=find_packages(exclude=('tests', 'tests.*')), install_requires=[ 'click', 'python-dateutil', 'gdal', 'numpy', 'pathlib', 'pyyaml', ], entry_points=''' [console_scripts] eod-package=eodatasets.scripts.package:cli eod-generate-browse=eodatasets.scripts.generatebrowse:cli ''', )
Exclude test package from builds.
Exclude test package from builds.
Python
apache-2.0
jeremyh/eo-datasets,GeoscienceAustralia/eo-datasets,GeoscienceAustralia/eo-datasets,jeremyh/eo-datasets
--- +++ @@ -12,7 +12,7 @@ setup( name="eodatasets", version=version, - packages=find_packages(), + packages=find_packages(exclude=('tests', 'tests.*')), install_requires=[ 'click', 'python-dateutil',
ff029b3cd79ab3f68ed5fc56be069e29580d5b46
setup.py
setup.py
#!/usr/bin/env python import os from numpy.distutils.core import setup, Extension # Utility function to read the README file. def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() wrapper = Extension('fortran_routines', sources=['src/fortran_routines.f90'], extra_f90_compile_args=["-std=f2003"]) setup(name='stripsim', version='0.1', description='A simulation pipeline for the LSPE/Strip instrument', author='Maurizio Tomasi', author_email='maurizio.tomasi@unimi.it', license='MIT', url='https://github.com/ziotom78/stripsim', long_description=read('README.md'), py_modules=['src/stripsim'], install_requires=['pyyaml'], ext_modules=[wrapper] )
#!/usr/bin/env python import os from numpy.distutils.core import setup, Extension # Utility function to read the README file. def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() wrapper = Extension('fortran_routines', sources=['src/fortran_routines.f90'], extra_f90_compile_args=["-std=f2003"]) setup(name='stripsim', version='0.1', description='A simulation pipeline for the LSPE/Strip instrument', author='Maurizio Tomasi', author_email='maurizio.tomasi@unimi.it', license='MIT', url='https://github.com/ziotom78/stripsim', long_description=read('README.md'), py_modules=['src/stripsim'], install_requires=['healpy', 'pyyaml'], ext_modules=[wrapper] )
Add a dependency on healpy
Add a dependency on healpy
Python
mit
ziotom78/stripeline,ziotom78/stripeline
--- +++ @@ -19,6 +19,6 @@ url='https://github.com/ziotom78/stripsim', long_description=read('README.md'), py_modules=['src/stripsim'], - install_requires=['pyyaml'], + install_requires=['healpy', 'pyyaml'], ext_modules=[wrapper] )
472b0a0ba90054f151a60e200902b67223fbf6d9
setup.py
setup.py
from distutils.core import setup from setuptools.command.install import install try: description = open('README.txt').read() except: description = open('README.md').read() setup( name='python-ldap-test', version='0.2.2', author='Adrian Gruntkowski', author_email='adrian.gruntkowski@gmail.com', packages=['ldap_test', 'ldap_test.test'], url='https://github.com/zoldar/python-ldap-test/', license='LICENSE.txt', description=('Tool for testing code speaking with LDAP server. Allows to easily' ' configure and run an embedded, in-memory LDAP server. Uses' ' UnboundID LDAP SDK through Py4J.'), keywords = ['testing', 'tests', 'test', 'ldap'], long_description=description, install_requires=[ "py4j >= 0.10.2.1", ], package_data={ '': ['*.txt'], 'ldap_test': ['*.jar'], }, options={ 'bdist_rpm': { 'build_requires':[ 'python', 'python-setuptools', 'py4j', ], 'requires':[ 'python', 'py4j', ], }, }, )
import codecs from distutils.core import setup def read(fname): ''' Read a file from the directory where setup.py resides ''' with codecs.open(fname, encoding='utf-8') as rfh: return rfh.read() try: description = read('README.txt') except: description = read('README.md') setup( name='python-ldap-test', version='0.2.2', author='Adrian Gruntkowski', author_email='adrian.gruntkowski@gmail.com', packages=['ldap_test', 'ldap_test.test'], url='https://github.com/zoldar/python-ldap-test/', license='LICENSE.txt', description=('Tool for testing code speaking with LDAP server. Allows to easily' ' configure and run an embedded, in-memory LDAP server. Uses' ' UnboundID LDAP SDK through Py4J.'), keywords=['testing', 'tests', 'test', 'ldap'], long_description=description, install_requires=[ "py4j >= 0.10.2.1", ], package_data={ '': ['*.txt'], 'ldap_test': ['*.jar'], }, options={ 'bdist_rpm': { 'build_requires':[ 'python', 'python-setuptools', 'py4j', ], 'requires':[ 'python', 'py4j', ], }, }, )
Fix installation on systems where locales are not properly configured
Fix installation on systems where locales are not properly configured
Python
mit
zoldar/python-ldap-test,zoldar/python-ldap-test
--- +++ @@ -1,11 +1,18 @@ +import codecs from distutils.core import setup -from setuptools.command.install import install +def read(fname): + ''' + Read a file from the directory where setup.py resides + ''' + with codecs.open(fname, encoding='utf-8') as rfh: + return rfh.read() + try: - description = open('README.txt').read() + description = read('README.txt') except: - description = open('README.md').read() + description = read('README.md') setup( @@ -19,7 +26,7 @@ description=('Tool for testing code speaking with LDAP server. Allows to easily' ' configure and run an embedded, in-memory LDAP server. Uses' ' UnboundID LDAP SDK through Py4J.'), - keywords = ['testing', 'tests', 'test', 'ldap'], + keywords=['testing', 'tests', 'test', 'ldap'], long_description=description, install_requires=[ "py4j >= 0.10.2.1",
0466ed83bdf9a84f01235339aef7505d26d6df3a
setup.py
setup.py
from setuptools import setup, find_packages setup( name='manifold', version='1.0.0', packages=find_packages(include=('manifold', 'manifold.*')), long_description=open('README.md').read(), include_package_data=True, install_requires=( 'numpy', 'scipy', 'matplotlib', 'sklearn', 'networkx', 'fake-factory', 'nose', 'nose-parameterized', 'coverage', 'radon', ), license='MIT', test_suite='tests.unit', classifiers=[ 'Programming Language :: Python', 'License :: MIT', 'Natural language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Topic :: Manifold Learning', ], requires=['networkx', 'numpy', 'sklearn', 'matplotlib'] )
from setuptools import setup, find_packages setup( name='manifold', version='1.0.0', packages=find_packages(include=('manifold', 'manifold.*')), long_description=open('README.md').read(), include_package_data=True, install_requires=( 'numpy', 'scipy', 'matplotlib', 'sklearn', 'networkx', 'fake-factory', 'nose', 'nose-parameterized', 'coverage', 'radon', ), license='MIT', test_suite='tests.unit', classifiers=[ 'Programming Language :: Python', 'License :: MIT', 'Natural language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Topic :: Manifold Learning', ], requires=['networkx', 'numpy', 'sklearn', 'matplotlib', 'scipy'] )
Add scipy to project requirements list
Add scipy to project requirements list
Python
mit
lucasdavid/Manifold-Learning,lucasdavid/Manifold-Learning
--- +++ @@ -27,5 +27,5 @@ 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Topic :: Manifold Learning', - ], requires=['networkx', 'numpy', 'sklearn', 'matplotlib'] + ], requires=['networkx', 'numpy', 'sklearn', 'matplotlib', 'scipy'] )
bfbe4f0a2fa231b22f6ebaae3eb1065565ab66e4
setup.py
setup.py
# -*- coding: utf-8 -*- """ setup :copyright: (c) 2012-2014 by Openlabs Technologies & Consulting (P) Limited :license: GPLv3, see LICENSE for more details. """ from setuptools import setup setup(name='trytond_sentry', version='3.0.1.0', description='Sentry Client for Tryton', long_description=open('README.rst').read(), author="Openlabs Technologies & Consulting (P) Limited", author_email="info@openlabs.co.in", url="http://www.openlabs.co.in", package_dir={'trytond_sentry': '.'}, packages=[ 'trytond_sentry', ], scripts=[ 'bin/trytond_sentry', ], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Plugins', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Office/Business', ], license='GPL-3', install_requires=[ "trytond>=3.0,<3.1", "raven", ], zip_safe=False, )
# -*- coding: utf-8 -*- """ setup :copyright: (c) 2012-2014 by Openlabs Technologies & Consulting (P) Limited :license: GPLv3, see LICENSE for more details. """ from setuptools import setup setup(name='trytond_sentry', version='3.0.1.0', description='Sentry Client for Tryton', long_description=open('README.rst').read(), author="Openlabs Technologies & Consulting (P) Limited", author_email="info@openlabs.co.in", url="https://github.com/openlabs/trytond-sentry", package_dir={'trytond_sentry': '.'}, packages=[ 'trytond_sentry', ], scripts=[ 'bin/trytond_sentry', ], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Plugins', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Office/Business', ], license='GPL-3', install_requires=[ "trytond>=3.0,<3.1", "raven", ], zip_safe=False, )
Set homepage for package as github url
Set homepage for package as github url
Python
bsd-3-clause
fulfilio/trytond-sentry
--- +++ @@ -13,7 +13,7 @@ long_description=open('README.rst').read(), author="Openlabs Technologies & Consulting (P) Limited", author_email="info@openlabs.co.in", - url="http://www.openlabs.co.in", + url="https://github.com/openlabs/trytond-sentry", package_dir={'trytond_sentry': '.'}, packages=[ 'trytond_sentry',
1ad58abffb5b9f768a916620e503b2511509fba5
setup.py
setup.py
import codecs from os import path from setuptools import find_packages, setup def read(*parts): filename = path.join(path.dirname(__file__), *parts) with codecs.open(filename, encoding="utf-8") as fp: return fp.read() setup( author="Pinax Developers", author_email="developers@pinaxproject.com", description="a reusable private user messages application for Django", name="pinax-messages", long_description=read("README.md"), version="1.0.1", url="http://github.com/pinax/pinax-messages/", license="MIT", packages=find_packages(), package_data={ "messages": [] }, test_suite="runtests.runtests", tests_require=[ "django-test-plus>=1.0.11", "pinax-theme-bootstrap>=7.10.0", ], install_requires=[ "django-appconf>=1.0.1", "django-user-accounts>=1.3.1" ], classifiers=[ "Development Status :: 4 - Beta", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ], zip_safe=False )
import codecs from os import path from setuptools import find_packages, setup def read(*parts): filename = path.join(path.dirname(__file__), *parts) with codecs.open(filename, encoding="utf-8") as fp: return fp.read() setup( author="Pinax Developers", author_email="developers@pinaxproject.com", description="a reusable private user messages application for Django", name="pinax-messages", long_description=read("README.md"), version="1.0.1", url="http://github.com/pinax/pinax-messages/", license="MIT", packages=find_packages(), package_data={ "messages": [] }, test_suite="runtests.runtests", tests_require=[ "django-test-plus>=1.0.11", "pinax-theme-bootstrap>=7.10.1", ], install_requires=[ "django-appconf>=1.0.1", "django-user-accounts>=1.3.1" ], classifiers=[ "Development Status :: 4 - Beta", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ], zip_safe=False )
Update test requirement for PTB
Update test requirement for PTB
Python
mit
eldarion/user_messages,pinax/pinax-messages,pinax/pinax-messages,eldarion/user_messages
--- +++ @@ -26,7 +26,7 @@ test_suite="runtests.runtests", tests_require=[ "django-test-plus>=1.0.11", - "pinax-theme-bootstrap>=7.10.0", + "pinax-theme-bootstrap>=7.10.1", ], install_requires=[ "django-appconf>=1.0.1",
674dd25bebb21919c27cb78bef2cee2f59f0c922
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup import py2exe setup(console=['CryptoUnLocker.py'])
#!/usr/bin/env python import sys from cx_Freeze import setup, Executable setup( name="CryptoUnLocker", version="1.0", Description="Detection and Decryption tool for CryptoLocker files", executables= [Executable("CryptoUnLocker.py")] )
Switch from py2exe to cx_Freeze
Switch from py2exe to cx_Freeze py2exe had issues with pycrypto. cx_Freeze seems to work.
Python
mit
kyrus/crypto-un-locker,thecocce/crypto-un-locker
--- +++ @@ -1,6 +1,11 @@ #!/usr/bin/env python -from distutils.core import setup -import py2exe +import sys +from cx_Freeze import setup, Executable -setup(console=['CryptoUnLocker.py']) +setup( + name="CryptoUnLocker", + version="1.0", + Description="Detection and Decryption tool for CryptoLocker files", + executables= [Executable("CryptoUnLocker.py")] +)
35e5643db16e2da200c9151b6d6fc53ea2096944
setup.py
setup.py
from setuptools import setup setup(name='polycircles', version='0.1', description='Polycircles: WGS84 Circle approximations using polygons', url='http://github.com/vioozer/servers', author='Adam Matan', author_email='adam@matan.name', license='MIT', packages=['polycircles'], include_package_data=True, install_requires=['geographiclib'], tests_require=['geopy >= 0.99', 'nose >= 1.3.1'], test_suite='polycircles.test', zip_safe=False)
from setuptools import setup setup(name='polycircles', version='0.1', description='Polycircles: WGS84 Circle approximations using polygons', url='http://github.com/vioozer/servers', author='Adam Matan', author_email='adam@matan.name', license='MIT', packages=['polycircles'], include_package_data=True, install_requires=['geographiclib'], tests_require=['geopy >= 0.99', 'nose >= 1.3.0'], test_suite='polycircles.test', zip_safe=False)
Reduce nose version to 1.3.0 for Travis CI.
Reduce nose version to 1.3.0 for Travis CI.
Python
mit
adamatan/polycircles
--- +++ @@ -10,6 +10,6 @@ packages=['polycircles'], include_package_data=True, install_requires=['geographiclib'], - tests_require=['geopy >= 0.99', 'nose >= 1.3.1'], + tests_require=['geopy >= 0.99', 'nose >= 1.3.0'], test_suite='polycircles.test', zip_safe=False)
3ca6feb2d20e6f3d8051a872ccba8a747e31bc51
setup.py
setup.py
#!/usr/bin/env python import sys import os from distutils.core import setup sys.path.insert(0, os.path.dirname(__file__)) from wutils import get_version, generate_version_py generate_version_py(force=False) setup(name='pybindgen', version=get_version(), description='Python Bindings Generator', author='Gustavo Carneiro', author_email='gjcarneiro@gmail.com', url='https://launchpad.net/pybindgen', packages=['pybindgen', 'pybindgen.typehandlers', 'pybindgen.typehandlers.ctypeparser'], )
#!/usr/bin/env python import sys import os from distutils.core import setup sys.path.insert(0, os.path.dirname(__file__)) from wutils import get_version, generate_version_py generate_version_py(force=False) setup(name='PyBindGen', version=get_version(), description='Python Bindings Generator', author='Gustavo Carneiro', author_email='gjcarneiro@gmail.com', url='https://launchpad.net/pybindgen', packages=['pybindgen', 'pybindgen.typehandlers', 'pybindgen.typehandlers.ctypeparser'], )
Revert back to PyBindGen as package name because it's what is already registered in PyPI.
Revert back to PyBindGen as package name because it's what is already registered in PyPI.
Python
lgpl-2.1
gjcarneiro/pybindgen,gjcarneiro/pybindgen,ftalbrecht/pybindgen,ftalbrecht/pybindgen,gjcarneiro/pybindgen,ftalbrecht/pybindgen,gjcarneiro/pybindgen,ftalbrecht/pybindgen
--- +++ @@ -7,7 +7,7 @@ from wutils import get_version, generate_version_py generate_version_py(force=False) -setup(name='pybindgen', +setup(name='PyBindGen', version=get_version(), description='Python Bindings Generator', author='Gustavo Carneiro',
a12bdf5b8ffa0b43fdd0b1ad9b49b47e09815c9c
pymsascoring/score/score.py
pymsascoring/score/score.py
import logging __author__ = "Antonio J. Nebro" __license__ = "GPL" __version__ = "1.0-SNAPSHOT" __status__ = "Development" __email__ = "antonio@lcc.uma.es" logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) class Score: """ Class representing MSA (Multiple Sequence Alignment) scores A msa has to be a Python list containing pairs of (identifier, sequence), as in this example: ((id1, SSSBA), (id2, HHALK), (id3, -HLGS), etc)) Requirements: - All the sequences in an msa must be aligned - The gap character is '-' """ def compute(self, msa) -> float: """ Compute the score :param msa :return: the value of the score """ pass def get_seqs_from_list_of_pairs(self, msa): """ Get the sequences from an msa. :param msa: Python list containing pairs of (identifier, sequence) :return: List of sequences (i.e. "('AB', 'CD', 'EF' )"). """ sequences = [] logger.debug('List of pairs: {0}'.format(msa)) for i in range(len(msa)): sequences.append(msa[i][1]) logger.debug('List of sequences: {0}'.format(sequences)) return sequences
import logging __author__ = "Antonio J. Nebro" __license__ = "GPL" __version__ = "1.0-SNAPSHOT" __status__ = "Development" __email__ = "antonio@lcc.uma.es" logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) class Score: """ Class representing MSA (Multiple Sequence Alignment) scores A msa has to be a Python list containing pairs of (identifier, sequence), as in this example: ((id1, SSSBA), (id2, HHALK), (id3, -HLGS), etc)) Requirements: - All the sequences in an msa must be aligned - The gap character is '-' """ def compute(self, msa) -> float: """ Compute the score :param msa :return: the value of the score """ pass def get_seqs_from_list_of_pairs(self, msa): """ Get the sequences from an msa. :param msa: Python list containing pairs of (identifier, sequence) :return: List of sequences (i.e. "('AB', 'CD', 'EF' )") if all sequences are of the same length. """ sequences = [] logger.debug('List of pairs: {0}'.format(msa)) for i in range(len(msa)): sequences.append(msa[i][1]) logger.debug('List of sequences: {0}'.format(sequences)) return sequences \ if all(len(sequences[0]) == len(seq) for seq in sequences) \ else self._raiser('Sequences are not of the same length.') def _raiser(self, e): raise Exception(e)
Check if all sequences are of the same length
Check if all sequences are of the same length
Python
mit
ajnebro/pyMSAScoring
--- +++ @@ -33,7 +33,7 @@ """ Get the sequences from an msa. :param msa: Python list containing pairs of (identifier, sequence) - :return: List of sequences (i.e. "('AB', 'CD', 'EF' )"). + :return: List of sequences (i.e. "('AB', 'CD', 'EF' )") if all sequences are of the same length. """ sequences = [] @@ -43,4 +43,8 @@ sequences.append(msa[i][1]) logger.debug('List of sequences: {0}'.format(sequences)) - return sequences + return sequences \ + if all(len(sequences[0]) == len(seq) for seq in sequences) \ + else self._raiser('Sequences are not of the same length.') + + def _raiser(self, e): raise Exception(e)
8162cf98f04125b7db1460a42a177eae516660d0
appliance/getready.py
appliance/getready.py
# -*- coding: utf-8 -*- import RPi.GPIO as gpio pin_power=12 pin_light=16 # Setup gpio.setmode(gpio.BOARD) gpio.setup(pin_power, gpio.OUT) gpio.setup(pin_light, gpio.OUT) gpio.output(pin_power, gpio.HIGH) gpio.output(pin_light, gpio.LOW)
# -*- coding: utf-8 -*- import RPi.GPIO as gpio pin_power=3 pin_light=5 # Setup gpio.setmode(gpio.BOARD) gpio.setup(pin_power, gpio.OUT) gpio.setup(pin_light, gpio.OUT) gpio.output(pin_power, gpio.HIGH) gpio.output(pin_light, gpio.LOW)
Modify the pin number due to the hardware case
Modify the pin number due to the hardware case
Python
apache-2.0
kensonman/IMCSmartHome,kensonman/IMCSmartHome,kensonman/IMCSmartHome
--- +++ @@ -2,8 +2,8 @@ import RPi.GPIO as gpio -pin_power=12 -pin_light=16 +pin_power=3 +pin_light=5 # Setup gpio.setmode(gpio.BOARD)
3e689dc769bd4859b4ed73e98d8d559710aa2e14
tools/perf/profile_creators/small_profile_creator.py
tools/perf/profile_creators/small_profile_creator.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os from telemetry.core import util from telemetry.page import page_set from telemetry.page import profile_creator class SmallProfileCreator(profile_creator.ProfileCreator): """ Runs a browser through a series of operations to fill in a small test profile. """ def __init__(self): super(SmallProfileCreator, self).__init__() typical_25 = os.path.join(util.GetBaseDir(), 'page_sets', 'typical_25.py') self._page_set = page_set.PageSet.FromFile(typical_25) # Open all links in the same tab save for the last _NUM_TABS links which # are each opened in a new tab. self._NUM_TABS = 5 def TabForPage(self, page, browser): idx = page.page_set.pages.index(page) # The last _NUM_TABS pages open a new tab. if idx <= (len(page.page_set.pages) - self._NUM_TABS): return browser.tabs[0] else: return browser.tabs.New() def MeasurePage(self, _, tab, results): # Can't use WaitForDocumentReadyStateToBeComplete() here due to # crbug.com/280750 . tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os from telemetry.core import util from telemetry.page import page_set from telemetry.page import profile_creator class SmallProfileCreator(profile_creator.ProfileCreator): """ Runs a browser through a series of operations to fill in a small test profile. """ def __init__(self): super(SmallProfileCreator, self).__init__() typical_25 = os.path.join(util.GetBaseDir(), 'page_sets', 'typical_25.py') self._page_set = page_set.PageSet.FromFile(typical_25) # Open all links in the same tab save for the last _NUM_TABS links which # are each opened in a new tab. self._NUM_TABS = 5 def TabForPage(self, page, browser): idx = page.page_set.pages.index(page) # The last _NUM_TABS pages open a new tab. if idx <= (len(page.page_set.pages) - self._NUM_TABS): return browser.tabs[0] else: return browser.tabs.New() def MeasurePage(self, _, tab, results): tab.WaitForDocumentReadyStateToBeComplete()
Make profile generator wait for pages to load completely.
[Telemetry] Make profile generator wait for pages to load completely. The bug that causes this not to work is fixed. It is possible that this non-determinism in the profile could lead to flakiness in the session_restore benchmark. BUG=375979 Review URL: https://codereview.chromium.org/318733002 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@274971 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
hgl888/chromium-crosswalk-efl,littlstar/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,jaruba/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,Chilledheart/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,dushu1203/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,ltilve/chromium,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,ltilve/chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,jaruba/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,Jonekee/chromium.src,ltilve/chromium,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-
crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,littlstar/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,ltilve/chromium,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,littlstar/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,ltilve/chromium,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk
--- +++ @@ -32,6 +32,4 @@ return browser.tabs.New() def MeasurePage(self, _, tab, results): - # Can't use WaitForDocumentReadyStateToBeComplete() here due to - # crbug.com/280750 . - tab.WaitForDocumentReadyStateToBeInteractiveOrBetter() + tab.WaitForDocumentReadyStateToBeComplete()
a61363b23c2fee99c7420cf2371a2711b3bc1eaa
indra/java_vm.py
indra/java_vm.py
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' os.environ['CLASSPATH'] = cp + ':' + os.environ['CLASSPATH'] from jnius import autoclass, JavaException, cast
Include current classpath when starting java VM
Include current classpath when starting java VM
Python
bsd-2-clause
johnbachman/indra,johnbachman/belpy,sorgerlab/indra,jmuhlich/indra,sorgerlab/indra,sorgerlab/belpy,jmuhlich/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,jmuhlich/indra
--- +++ @@ -14,7 +14,7 @@ path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' -os.environ['CLASSPATH'] = cp +os.environ['CLASSPATH'] = cp + ':' + os.environ['CLASSPATH'] from jnius import autoclass, JavaException, cast
f79f5a8494930cd7f64c6471dea631a7e8f35478
slave/skia_slave_scripts/flavor_utils/xsan_build_step_utils.py
slave/skia_slave_scripts/flavor_utils/xsan_build_step_utils.py
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Utilities for ASAN,TSAN,etc. build steps. """ from default_build_step_utils import DefaultBuildStepUtils from py.utils import shell_utils import os class XsanBuildStepUtils(DefaultBuildStepUtils): def Compile(self, target): # Run the xsan_build script. os.environ['GYP_DEFINES'] = self._step.args['gyp_defines'] print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES'] cmd = [ os.path.join('tools', 'xsan_build'), self._step.args['sanitizer'], target, 'BUILDTYPE=%s' % self._step.configuration, ] cmd.extend(self._step.default_make_flags) cmd.extend(self._step.make_flags) shell_utils.run(cmd) def RunFlavoredCmd(self, app, args): # New versions of ASAN run LSAN by default. We're not yet clean for that. os.environ['ASAN_OPTIONS'] = 'detect_leaks=0' # Point TSAN at our suppressions file. os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp' return shell_utils.run([self._PathToBinary(app)] + args)
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Utilities for ASAN,TSAN,etc. build steps. """ from default_build_step_utils import DefaultBuildStepUtils from py.utils import shell_utils import os class XsanBuildStepUtils(DefaultBuildStepUtils): def Compile(self, target): # Run the xsan_build script. os.environ['GYP_DEFINES'] = self._step.args['gyp_defines'] print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES'] cmd = [ os.path.join('tools', 'xsan_build'), self._step.args['sanitizer'], target, 'BUILDTYPE=%s' % self._step.configuration, ] cmd.extend(self._step.default_make_flags) cmd.extend(self._step.make_flags) shell_utils.run(cmd) def RunFlavoredCmd(self, app, args): # TODO(mtklein): Enable symbolize=1 for all these after # figuring out external_symbolizer_path. os.environ['ASAN_OPTIONS'] = 'detect_leaks=1' os.environ['LSAN_OPTIONS'] = \ 'suppressions=tools/lsan.supp print_suppressions=1' os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp' return shell_utils.run([self._PathToBinary(app)] + args)
Enable leak checks on our ASAN bots.
Enable leak checks on our ASAN bots. After updating our suppressions, I had clean LSAN runs of dm and nanobench yesterday on my desktop. Hopefully this will hold true for the bots. BUG=skia: R=borenet@google.com, mtklein@google.com Author: mtklein@chromium.org Review URL: https://codereview.chromium.org/524623002
Python
bsd-3-clause
google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot
--- +++ @@ -27,8 +27,10 @@ shell_utils.run(cmd) def RunFlavoredCmd(self, app, args): - # New versions of ASAN run LSAN by default. We're not yet clean for that. - os.environ['ASAN_OPTIONS'] = 'detect_leaks=0' - # Point TSAN at our suppressions file. + # TODO(mtklein): Enable symbolize=1 for all these after + # figuring out external_symbolizer_path. + os.environ['ASAN_OPTIONS'] = 'detect_leaks=1' + os.environ['LSAN_OPTIONS'] = \ + 'suppressions=tools/lsan.supp print_suppressions=1' os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp' return shell_utils.run([self._PathToBinary(app)] + args)
bdde8bb3ee9a79dc0ae777bb6e226dbc2be18dfb
manuscript/urls.py
manuscript/urls.py
# Copyright (C) 2011 by Christopher Adams # Released under MIT License. See LICENSE.txt in the root of this # distribution for details. from django.conf.urls.defaults import * urlpatterns = patterns('', url(r'^$', 'manuscript.views.all_works', name="all-works"), url(r'^(?P<title>[-\w]+)/$', 'manuscript.views.whole_work', name="show-whole-work"), url(r'^(?P<title>[-\w]+)/list-chapters/$', 'manuscript.views.chapters', name="show-chapters"), url(r'^(?P<title>[-\w]+)/(?P<page>\d+)/$', 'manuscript.views.page', name="show-page"), url(r'^(?P<title>[-\w]+)/(?P<chapter>[-\w]+)/$', 'manuscript.views.chapter', name="show-chapter"), # url(r'^(?P<title>.*)/(?P<model>.*)/?$', 'model_by_work'), # url(r'^(?P<title>.*)/(?P<model>.*)/(?P<id>\d*)/?$', 'element_by_id'), )
# Copyright (C) 2011 by Christopher Adams # Released under MIT License. See LICENSE.txt in the root of this # distribution for details. from django.conf.urls.defaults import * urlpatterns = patterns('', url(r'^$', 'manuscript.views.all_works', name="all-works"), url(r'^(?P<title>[-\w]+)/$', 'manuscript.views.whole_work', name="show-whole-work"), url(r'^(?P<title>[-\w]+)/list-chapters/$', 'manuscript.views.chapters', name="show-chapters"), url(r'^(?P<title>[-\w]+)/(?P<page>\d+)/$', 'manuscript.views.page', name="show-page"), url(r'^(?P<title>[-\w]+)/(?P<chapter>[-\w]+)/$', 'manuscript.views.chapter', name="show-chapter"), # url(r'^(?P<title>.*)/(?P<model>.*)/?$', 'model_by_work'), # url(r'^(?P<title>.*)/(?P<model>.*)/(?P<id>\d*)/?$', 'element_by_id'), # url(r'^img_to_db/run/$', 'wyclif.bin.img_to_db.run_view'), # url(r'^db/runimport/$', 'wyclif.bin.csv_to_db.run_view'), )
Add sample url patterns to views for management functions.
Add sample url patterns to views for management functions.
Python
mit
adamsc64/django-manuscript,adamsc64/django-manuscript
--- +++ @@ -12,6 +12,10 @@ url(r'^(?P<title>[-\w]+)/(?P<chapter>[-\w]+)/$', 'manuscript.views.chapter', name="show-chapter"), # url(r'^(?P<title>.*)/(?P<model>.*)/?$', 'model_by_work'), # url(r'^(?P<title>.*)/(?P<model>.*)/(?P<id>\d*)/?$', 'element_by_id'), + +# url(r'^img_to_db/run/$', 'wyclif.bin.img_to_db.run_view'), +# url(r'^db/runimport/$', 'wyclif.bin.csv_to_db.run_view'), + )
247850851367486d54c8cf3a074e85d1d283e654
message_view.py
message_view.py
import sublime import sublime_plugin PANEL_NAME = "SublimeLinter Messages" OUTPUT_PANEL = "output." + PANEL_NAME def plugin_unloaded(): for window in sublime.windows(): window.destroy_output_panel(PANEL_NAME) class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): panel_view = self.window.create_output_panel(PANEL_NAME, True) panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(0) self.window.run_command("show_panel", {"panel": OUTPUT_PANEL}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand): def run(self): self.window.destroy_output_panel(PANEL_NAME)
import sublime import sublime_plugin PANEL_NAME = "SublimeLinter Messages" OUTPUT_PANEL = "output." + PANEL_NAME def plugin_unloaded(): for window in sublime.windows(): window.destroy_output_panel(PANEL_NAME) class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): panel_view = self.window.create_output_panel(PANEL_NAME) panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(0) self.window.run_command("show_panel", {"panel": OUTPUT_PANEL}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand): def run(self): self.window.destroy_output_panel(PANEL_NAME)
Make the message panel accessible via menu
Make the message panel accessible via menu
Python
mit
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
--- +++ @@ -12,7 +12,7 @@ class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): - panel_view = self.window.create_output_panel(PANEL_NAME, True) + panel_view = self.window.create_output_panel(PANEL_NAME) panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True)
e5db0a11634dc442f77f5550efd5cbf687ea6526
lionschool/core/admin.py
lionschool/core/admin.py
from django.contrib import admin from .models import Grade, Group, Pupil, Teacher, Warden for model in {Grade, Group, Pupil, Teacher, Warden}: admin.site.register(model)
from django.contrib import admin from .models import Grade, Group, Pupil, Teacher, Warden, Course for model in Grade, Group, Pupil, Teacher, Warden, Course: admin.site.register(model)
Add Course field to Admin
Add Course field to Admin Oops! forgot..
Python
bsd-3-clause
Leo2807/lioncore
--- +++ @@ -1,6 +1,6 @@ from django.contrib import admin -from .models import Grade, Group, Pupil, Teacher, Warden +from .models import Grade, Group, Pupil, Teacher, Warden, Course -for model in {Grade, Group, Pupil, Teacher, Warden}: +for model in Grade, Group, Pupil, Teacher, Warden, Course: admin.site.register(model)
fda50fb75b0b0e1d571c825e0a364573b93461bc
mbuild/__init__.py
mbuild/__init__.py
from mbuild.box import Box from mbuild.coarse_graining import coarse_grain from mbuild.coordinate_transform import * from mbuild.compound import * from mbuild.pattern import * from mbuild.packing import * from mbuild.port import Port from mbuild.recipes import * from mbuild.lattice import Lattice from mbuild.recipes import recipes from mbuild.version import version
from mbuild.box import Box from mbuild.coarse_graining import coarse_grain from mbuild.coordinate_transform import * from mbuild.compound import * from mbuild.pattern import * from mbuild.packing import * from mbuild.port import Port from mbuild.lattice import Lattice from mbuild.recipes import recipes from mbuild.version import version
Remove a troubling import *
Remove a troubling import *
Python
mit
iModels/mbuild,iModels/mbuild
--- +++ @@ -5,7 +5,6 @@ from mbuild.pattern import * from mbuild.packing import * from mbuild.port import Port -from mbuild.recipes import * from mbuild.lattice import Lattice from mbuild.recipes import recipes from mbuild.version import version
bd679f26e384ab42ac9edc2e99575dc57b9450ef
singleuser/user-config.py
singleuser/user-config.py
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del f del custom_path # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del f del custom_path # Things that should be non-easily-overridable usernames['wikipedia']['*'] = os.environ['JPY_USER']
Mark the usernames to work only with wikipedia
Mark the usernames to work only with wikipedia Bug: T120334
Python
mit
yuvipanda/paws,yuvipanda/paws
--- +++ @@ -15,4 +15,4 @@ del custom_path # Things that should be non-easily-overridable -usernames['*']['*'] = os.environ['JPY_USER'] +usernames['wikipedia']['*'] = os.environ['JPY_USER']
64398ae731b2f89e126ae9c63fe048134cbf649c
daybed/tests/support.py
daybed/tests/support.py
import os from unittest import TestCase import webtest HERE = os.path.dirname(os.path.abspath(__file__)) class BaseWebTest(TestCase): """Base Web Test to test your cornice service. It setups the database before each test and delete it after. """ def setUp(self): self.app = webtest.TestApp("config:tests.ini", relative_to=HERE) self.db_server = self.app.app.registry.settings['db_server'] def tearDown(self): # Delete Test DB del self.db_server[self.app.app.registry.settings['db_name']] def put_valid_definition(self): """Create a valid definition named "todo". """ # Put a valid definition self.app.put_json('/definitions/todo', self.valid_definition, headers=self.headers)
import os from uuid import uuid4 from unittest import TestCase import webtest HERE = os.path.dirname(os.path.abspath(__file__)) class BaseWebTest(TestCase): """Base Web Test to test your cornice service. It setups the database before each test and delete it after. """ def setUp(self): self.db_name = os.environ['DB_NAME'] = 'daybed-tests-%s' % uuid4() self.app = webtest.TestApp("config:tests.ini", relative_to=HERE) self.db_server = self.app.app.registry.settings['db_server'] def tearDown(self): # Delete Test DB del self.db_server[self.db_name] def put_valid_definition(self): """Create a valid definition named "todo". """ # Put a valid definition self.app.put_json('/definitions/todo', self.valid_definition, headers=self.headers)
Create a random db for the tests each time.
Create a random db for the tests each time.
Python
bsd-3-clause
spiral-project/daybed,spiral-project/daybed
--- +++ @@ -1,4 +1,5 @@ import os +from uuid import uuid4 from unittest import TestCase import webtest @@ -13,12 +14,14 @@ """ def setUp(self): + self.db_name = os.environ['DB_NAME'] = 'daybed-tests-%s' % uuid4() + self.app = webtest.TestApp("config:tests.ini", relative_to=HERE) self.db_server = self.app.app.registry.settings['db_server'] def tearDown(self): # Delete Test DB - del self.db_server[self.app.app.registry.settings['db_name']] + del self.db_server[self.db_name] def put_valid_definition(self): """Create a valid definition named "todo".
6f968a4aa4048163dd55f927a32da2477cd8c1ff
tx_salaries/search_indexes.py
tx_salaries/search_indexes.py
from haystack import indexes from tx_people.models import Organization from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) def get_model(self): return Employee
from haystack import indexes from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): return Employee
Index slugs to reduce search page queries
Index slugs to reduce search page queries
Python
apache-2.0
texastribune/tx_salaries,texastribune/tx_salaries
--- +++ @@ -1,5 +1,4 @@ from haystack import indexes -from tx_people.models import Organization from tx_salaries.models import Employee @@ -8,8 +7,11 @@ content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) + title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) + department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) + entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): return Employee
42c7496beefea0e5d10cbd6e356335efae27a5ec
taiga/projects/migrations/0043_auto_20160530_1004.py
taiga/projects/migrations/0043_auto_20160530_1004.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-05-30 10:04 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('projects', '0042_auto_20160525_0911'), ] operations = [ migrations.AlterField( model_name='project', name='owner', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-05-30 10:04 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('projects', '0040_remove_memberships_of_cancelled_users_acounts'), ] operations = [ migrations.AlterField( model_name='project', name='owner', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_projects', to=settings.AUTH_USER_MODEL, verbose_name='owner'), ), ]
Fix a problem with a migration between master and stable branch
Fix a problem with a migration between master and stable branch
Python
agpl-3.0
dayatz/taiga-back,taigaio/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,xdevelsistemas/taiga-back-community,dayatz/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back
--- +++ @@ -10,7 +10,7 @@ class Migration(migrations.Migration): dependencies = [ - ('projects', '0042_auto_20160525_0911'), + ('projects', '0040_remove_memberships_of_cancelled_users_acounts'), ] operations = [
7352f08852d5d265e4cb79a43e056b666a1877c5
setup.py
setup.py
from setuptools import setup, find_packages setup( name="go_contacts", version="0.1.0a", url='http://github.com/praekelt/go-contacts-api', license='BSD', description="A contacts and groups API for Vumi Go", long_description=open('README.rst', 'r').read(), author='Praekelt Foundation', author_email='dev@praekeltfoundation.org', packages=find_packages(), include_package_data=True, install_requires=[ 'cyclone', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet :: WWW/HTTP', ], )
from setuptools import setup, find_packages setup( name="go_contacts", version="0.1.0a", url='http://github.com/praekelt/go-contacts-api', license='BSD', description="A contacts and groups API for Vumi Go", long_description=open('README.rst', 'r').read(), author='Praekelt Foundation', author_email='dev@praekeltfoundation.org', packages=find_packages(), include_package_data=True, install_requires=[ 'cyclone', 'go_api', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet :: WWW/HTTP', ], )
Add go_api as a dependency.
Add go_api as a dependency.
Python
bsd-3-clause
praekelt/go-contacts-api,praekelt/go-contacts-api
--- +++ @@ -13,6 +13,7 @@ include_package_data=True, install_requires=[ 'cyclone', + 'go_api', ], classifiers=[ 'Development Status :: 4 - Beta',
8565921f7bc42c30534bdc272bd3daaf1e758b1c
setup.py
setup.py
import os from setuptools import setup, find_packages DESCRIPTION = 'Send emails using Django template system' LONG_DESCRIPTION = None try: LONG_DESCRIPTION = open('README.rst').read() except: pass CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", 'Topic :: Software Development :: Libraries :: Python Modules', 'Framework :: Django', ] VERSION = '2.6.0' VERSION = os.environ.get('MAIL_TEMPLATED_VERSION', VERSION) setup( name='django-mail-templated', version=VERSION, packages=find_packages(), include_package_data=True, author='Artem Rizhov', author_email='artem.rizhov@gmail.com', url='https://github.com/artemrizhov/django-mail-templated', license='MIT', description=DESCRIPTION, long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, test_suite='mail_templated.test_utils.run.run_tests', install_requires = ['django'], )
import os from setuptools import setup, find_packages DESCRIPTION = 'Send emails using Django template system' LONG_DESCRIPTION = None try: LONG_DESCRIPTION = open('README.rst').read() except: pass CLASSIFIERS = [ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", 'Topic :: Software Development :: Libraries :: Python Modules', 'Framework :: Django', ] VERSION = '2.6.0' VERSION = os.environ.get('MAIL_TEMPLATED_VERSION', VERSION) setup( name='django-mail-templated', version=VERSION, packages=find_packages(), include_package_data=True, author='Artem Rizhov', author_email='artem.rizhov@gmail.com', url='https://github.com/artemrizhov/django-mail-templated', license='MIT', description=DESCRIPTION, long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, test_suite='mail_templated.test_utils.run.run_tests', )
Remove Django from dependencies to avoid auto upgrade
Remove Django from dependencies to avoid auto upgrade
Python
mit
artemrizhov/django-mail-templated,artemrizhov/django-mail-templated,artemrizhov/django-mail-templated
--- +++ @@ -40,5 +40,4 @@ platforms=['any'], classifiers=CLASSIFIERS, test_suite='mail_templated.test_utils.run.run_tests', - install_requires = ['django'], )
ee9a9b12248d119d561523e8ca1b692f11a56fd7
datasets/forms.py
datasets/forms.py
from django import forms from datasets.models import DatasetRelease, CategoryComment class DatasetReleaseForm(forms.ModelForm): max_number_of_sounds = forms.IntegerField(required=False) class Meta: model = DatasetRelease fields = ['release_tag', 'type'] class PresentNotPresentUnsureForm(forms.Form): vote = forms.ChoiceField( required=True, widget=forms.RadioSelect, choices=( ('1', 'Present and predominant',), ('0.5', 'Present but not predominant',), ('-1', 'Not Present',), ('0', 'Unsure',), ), ) annotation_id = forms.IntegerField( required=True, widget=forms.HiddenInput, ) visited_sound = forms.BooleanField( required=False, initial=False, widget=forms.HiddenInput, ) class CategoryCommentForm(forms.ModelForm): class Meta: model = CategoryComment fields = ['comment', 'category_id', 'dataset'] widgets = { 'comment': forms.Textarea(attrs={ 'cols': 80, 'rows': 3, 'placeholder': 'Add here any general comments you want to make about this category'}), 'category_id': forms.HiddenInput, 'dataset_id': forms.HiddenInput, }
from django import forms from datasets.models import DatasetRelease, CategoryComment class DatasetReleaseForm(forms.ModelForm): max_number_of_sounds = forms.IntegerField(required=False) class Meta: model = DatasetRelease fields = ['release_tag', 'type'] class PresentNotPresentUnsureForm(forms.Form): vote = forms.ChoiceField( required=True, widget=forms.RadioSelect, choices=( ('1', 'Present and predominant',), ('0.5', 'Present but not predominant',), ('-1', 'Not present',), ('0', 'Unsure',), ), ) annotation_id = forms.IntegerField( required=True, widget=forms.HiddenInput, ) visited_sound = forms.BooleanField( required=False, initial=False, widget=forms.HiddenInput, ) class CategoryCommentForm(forms.ModelForm): class Meta: model = CategoryComment fields = ['comment', 'category_id', 'dataset'] widgets = { 'comment': forms.Textarea(attrs={ 'cols': 80, 'rows': 3, 'placeholder': 'Add here any general comments you want to make about this category'}), 'category_id': forms.HiddenInput, 'dataset_id': forms.HiddenInput, }
Remove upper case Not Present
Remove upper case Not Present
Python
agpl-3.0
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
--- +++ @@ -17,7 +17,7 @@ choices=( ('1', 'Present and predominant',), ('0.5', 'Present but not predominant',), - ('-1', 'Not Present',), + ('-1', 'Not present',), ('0', 'Unsure',), ), )
e43b91412ee0899bb6b851a760dd06bce263d099
Instanssi/ext_blog/templatetags/blog_tags.py
Instanssi/ext_blog/templatetags/blog_tags.py
# -*- coding: utf-8 -*-

from django import template
from django.conf import settings
from Instanssi.ext_blog.models import BlogEntry

register = template.Library()

@register.inclusion_tag('ext_blog/blog_messages.html')
def render_blog(event_id):
    entries = BlogEntry.objects.filter(event_id__lte=int(event_id), public=True).order_by('-date')[:10]
    return {'entries': entries}

@register.inclusion_tag('ext_blog/blog_rss_tag.html')
def render_blog_rss_tag():
    return {}

@register.simple_tag
def blog_rss_url():
    return 'http://'+settings.DOMAIN+'/blog/rss/'
# -*- coding: utf-8 -*-

from django import template
from django.conf import settings
from Instanssi.ext_blog.models import BlogEntry

register = template.Library()

@register.inclusion_tag('ext_blog/blog_messages.html')
def render_blog(event_id, max_posts=10):
    entries = BlogEntry.objects.filter(event_id__lte=int(event_id), public=True).order_by('-date')[:max_posts]
    return {'entries': entries}

@register.inclusion_tag('ext_blog/blog_rss_tag.html')
def render_blog_rss_tag():
    return {}

@register.simple_tag
def blog_rss_url():
    return 'http://'+settings.DOMAIN+'/blog/rss/'
Allow customizing number of posts displayed
ext_blog: Allow customizing number of posts displayed
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
--- +++ @@ -7,8 +7,8 @@
 register = template.Library()
 
 @register.inclusion_tag('ext_blog/blog_messages.html')
-def render_blog(event_id):
-    entries = BlogEntry.objects.filter(event_id__lte=int(event_id), public=True).order_by('-date')[:10]
+def render_blog(event_id, max_posts=10):
+    entries = BlogEntry.objects.filter(event_id__lte=int(event_id), public=True).order_by('-date')[:max_posts]
     return {'entries': entries}
 
 @register.inclusion_tag('ext_blog/blog_rss_tag.html')
c8cc85f0d10093ae9cd42ee4cc7dabef46718645
ood/controllers/simple.py
ood/controllers/simple.py
import socket

from ood.minecraft import Client
from ood.models import SimpleServerState


class SimpleServerController(object):

    def __init__(self, ood_instance):
        self.state = SimpleServerState.objects.get(ood=ood_instance)
        self.mcc = Client(ood_instance)

    def start(self):
        self.mcc.reset_player_info()
        return self._send_cmd('start')

    def stop(self):
        return self._send_cmd('stop')

    def running(self):
        response = self._send_cmd('running').lower()
        return response == 'true'

    def _send_cmd(self, cmd):
        buf = ''
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((self.state.ip_address, self.state.port))
        s.sendall('%s\n' % cmd)

        while '\n' not in buf:
            buf += s.recv(1024)

        response, nl, buf = buf.partition('\n')
        s.close()
        return response
import socket

from ood.minecraft import Client
from ood.models import SimpleServerState


class SimpleServerController(object):

    def __init__(self, ood_instance):
        self.state, _ = SimpleServerState.objects.get_or_create(
            ood=ood_instance)
        self.mcc = Client(ood_instance)

    def start(self):
        self.mcc.reset_player_info()
        return self._send_cmd('start')

    def stop(self):
        return self._send_cmd('stop')

    def running(self):
        response = self._send_cmd('running').lower()
        return response == 'true'

    def _send_cmd(self, cmd):
        buf = ''
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((self.state.ip_address, self.state.port))
        s.sendall('%s\n' % cmd)

        while '\n' not in buf:
            buf += s.recv(1024)

        response, nl, buf = buf.partition('\n')
        s.close()
        return response
Create SimpleServerState object if it doesn't exist.
Create SimpleServerState object if it doesn't exist.
Python
mit
markrcote/ood,markrcote/ood,markrcote/ood,markrcote/ood
--- +++ @@ -7,7 +7,8 @@
 class SimpleServerController(object):
 
     def __init__(self, ood_instance):
-        self.state = SimpleServerState.objects.get(ood=ood_instance)
+        self.state, _ = SimpleServerState.objects.get_or_create(
+            ood=ood_instance)
         self.mcc = Client(ood_instance)
 
     def start(self):
a7919b78c96128cc5bcfda759da11f6b067d0041
tests/__init__.py
tests/__init__.py
import base64
import unittest


def setup_package(self):
    pass


def teardown_package(self):
    pass


class BaseS3EncryptTest(unittest.TestCase):

    def decode64(self, data):
        return base64.b64decode(data)

    def encode64(self, data):
        return base64.b64encode(data)
import base64
import codecs
import unittest


def setup_package(self):
    pass


def teardown_package(self):
    pass


class BaseS3EncryptTest(unittest.TestCase):

    def decode64(self, data):
        return base64.b64decode(codecs.decode(data, 'utf-8'))

    def encode64(self, data):
        return codecs.encode(base64.b64encode(data), 'utf-8')
Make test helpers py 3 compatable
Make test helpers py 3 compatable
Python
bsd-3-clause
boldfield/s3-encryption
--- +++ @@ -1,4 +1,5 @@
 import base64
+import codecs
 import unittest
 
 
@@ -13,7 +14,7 @@
 class BaseS3EncryptTest(unittest.TestCase):
 
     def decode64(self, data):
-        return base64.b64decode(data)
+        return base64.b64decode(codecs.decode(data, 'utf-8'))
 
     def encode64(self, data):
-        return base64.b64encode(data)
+        return codecs.encode(base64.b64encode(data), 'utf-8')
4e70bc00a7b3fb96302ac6c2a29e463e07eabbb0
tests/__init__.py
tests/__init__.py
import logging import unittest from cassandra.cluster import Cluster from cassandra.connection import _loop from cassandra.policies import HostDistance log = logging.getLogger() log.setLevel('DEBUG') log.addHandler(logging.StreamHandler()) existing_keyspaces = None def setup_package(): try: cluster = Cluster() cluster.set_core_connections_per_host(HostDistance.LOCAL, 1) cluster.set_max_connections_per_host(HostDistance.LOCAL, 1) session = cluster.connect() except Exception, exc: log.error('Failed to connect to cluster:') log.error(exc) raise unittest.SkipTest('Failed to connect to cluster: %r' % exc) try: global existing_keyspaces results = session.execute("SELECT keyspace_name FROM system.schema_keyspaces") existing_keyspaces = set([row.values()[0] for row in results]) finally: try: cluster.shutdown() except Exception, exc: log.error('Failed to connect to cluster:') log.error(exc) raise unittest.SkipTest('Failed to connect to cluster: %r' % exc) def teardown_package(): try: cluster = Cluster() cluster.set_core_connections_per_host(HostDistance.LOCAL, 1) cluster.set_max_connections_per_host(HostDistance.LOCAL, 1) session = cluster.connect() except Exception, exc: log.error('Failed to connect to cluster:') log.error(exc) raise unittest.SkipTest('Failed to connect to cluster: %r' % exc) try: if existing_keyspaces: results = session.execute("SELECT keyspace_name FROM system.schema_keyspaces") current_keyspaces = set([row.values()[0] for row in results]) for keyspace in current_keyspaces - existing_keyspaces: session.execute("DROP KEYSPACE %s" % (keyspace,)) finally: try: cluster.shutdown() _loop.stop() except Exception, exc: log.error('Failed to connect to cluster:') log.error(exc)
Test package setup and teardown
Test package setup and teardown
Python
apache-2.0
HackerEarth/cassandra-python-driver,jfelectron/python-driver,tempbottle/python-driver,stef1927/python-driver,kishkaru/python-driver,coldeasy/python-driver,jfelectron/python-driver,HackerEarth/cassandra-python-driver,stef1927/python-driver,aholmberg/python-driver,yi719/python-driver,mike-tr-adamson/python-driver,datastax/python-driver,bbirand/python-driver,mike-tr-adamson/python-driver,vipjml/python-driver,tempbottle/python-driver,thelastpickle/python-driver,aholmberg/python-driver,thelastpickle/python-driver,mambocab/python-driver,mobify/python-driver,jregovic/python-driver,markflorisson/python-driver,bbirand/python-driver,kishkaru/python-driver,kracekumar/python-driver,datastax/python-driver,beobal/python-driver,beobal/python-driver,jregovic/python-driver,vipjml/python-driver,yi719/python-driver,mambocab/python-driver,coldeasy/python-driver,thobbs/python-driver,thobbs/python-driver,markflorisson/python-driver,sontek/python-driver,sontek/python-driver,kracekumar/python-driver,mobify/python-driver
--- +++ @@ -0,0 +1,62 @@ +import logging +import unittest + +from cassandra.cluster import Cluster +from cassandra.connection import _loop +from cassandra.policies import HostDistance + +log = logging.getLogger() +log.setLevel('DEBUG') +log.addHandler(logging.StreamHandler()) + +existing_keyspaces = None + +def setup_package(): + try: + cluster = Cluster() + cluster.set_core_connections_per_host(HostDistance.LOCAL, 1) + cluster.set_max_connections_per_host(HostDistance.LOCAL, 1) + session = cluster.connect() + except Exception, exc: + log.error('Failed to connect to cluster:') + log.error(exc) + raise unittest.SkipTest('Failed to connect to cluster: %r' % exc) + + try: + global existing_keyspaces + results = session.execute("SELECT keyspace_name FROM system.schema_keyspaces") + existing_keyspaces = set([row.values()[0] for row in results]) + finally: + try: + cluster.shutdown() + except Exception, exc: + log.error('Failed to connect to cluster:') + log.error(exc) + raise unittest.SkipTest('Failed to connect to cluster: %r' % exc) + + +def teardown_package(): + try: + cluster = Cluster() + cluster.set_core_connections_per_host(HostDistance.LOCAL, 1) + cluster.set_max_connections_per_host(HostDistance.LOCAL, 1) + session = cluster.connect() + except Exception, exc: + log.error('Failed to connect to cluster:') + log.error(exc) + raise unittest.SkipTest('Failed to connect to cluster: %r' % exc) + + try: + if existing_keyspaces: + results = session.execute("SELECT keyspace_name FROM system.schema_keyspaces") + current_keyspaces = set([row.values()[0] for row in results]) + for keyspace in current_keyspaces - existing_keyspaces: + session.execute("DROP KEYSPACE %s" % (keyspace,)) + + finally: + try: + cluster.shutdown() + _loop.stop() + except Exception, exc: + log.error('Failed to connect to cluster:') + log.error(exc)
24ed97f09707a404bf81062a99a485c547d92d11
accio/webhooks/views.py
accio/webhooks/views.py
from django.http.response import HttpResponse


def webhook(request):
    return HttpResponse('Webhooks not implemented yet', status=501)
from django.http.response import HttpResponse
from django.views.decorators.csrf import csrf_exempt


@csrf_exempt
def webhook(request):
    return HttpResponse('Webhooks not implemented yet', status=501)
Add csrf_exempt to webhook view
fix: Add csrf_exempt to webhook view
Python
mit
relekang/accio,relekang/accio,relekang/accio
--- +++ @@ -1,5 +1,7 @@
 from django.http.response import HttpResponse
+from django.views.decorators.csrf import csrf_exempt
 
 
+@csrf_exempt
 def webhook(request):
     return HttpResponse('Webhooks not implemented yet', status=501)
936cb7ad48a2ab509127fa2eb4cc84af9b3dbe2a
spicedham/nonsensefilter.py
spicedham/nonsensefilter.py
import operator from itertools import imap, repeat from spicedham.config import load_config from spicedham.baseplugin import BasePlugin class NonsenseFilter(BasePlugin): """ Filter messages with no words in the database. """ def __init__(self, config, backend): """ Get values from the config. """ self.backend = backend nonsensefilter_config = config.get('nonsensefilter', {}) self.filter_match = nonsensefilter_config.get('filter_match', 1) self.filter_miss = nonsensefilter_config.get('filter_miss', None) def train(self, response, value): """ Set each word to True. """ self.backend.set_key_list(self.__class__.__name__, {(word, True) for word in response}) # TODO: Will match responses consisting of only '' def classify(self, response): """ If the message contains only words not found in the database return filter_match. Else return filter_miss. """ classifier = self.__class__.__name__ list_in_dict = lambda x, y: not self.backend.get_key(x, y, False) if all(imap(list_in_dict, repeat(classifier), response)): return self.filter_match else: return self.filter_miss
import operator from itertools import imap, repeat from spicedham.baseplugin import BasePlugin class NonsenseFilter(BasePlugin): """ Filter messages with no words in the database. """ def __init__(self, config, backend): """ Get values from the config. """ self.backend = backend nonsensefilter_config = config.get('nonsensefilter', {}) self.filter_match = nonsensefilter_config.get('filter_match', 1) self.filter_miss = nonsensefilter_config.get('filter_miss', None) def train(self, response, value): """ Set each word to True. """ self.backend.set_key_list(self.__class__.__name__, {(word, True) for word in response}) # TODO: Will match responses consisting of only '' def classify(self, response): """ If the message contains only words not found in the database return filter_match. Else return filter_miss. """ classifier = self.__class__.__name__ list_in_dict = lambda x, y: not self.backend.get_key(x, y, False) if all(imap(list_in_dict, repeat(classifier), response)): return self.filter_match else: return self.filter_miss
Remove extraneous and problematic import
Remove extraneous and problematic import
Python
mpl-2.0
mozilla/spicedham,mozilla/spicedham
--- +++ @@ -1,7 +1,6 @@ import operator from itertools import imap, repeat -from spicedham.config import load_config from spicedham.baseplugin import BasePlugin class NonsenseFilter(BasePlugin):
75345f55679437b418d9645371efc647b0d7db6c
filestore/api.py
filestore/api.py
from __future__ import absolute_import, division, print_function

from .commands import insert_resource, insert_datum, retrieve
from __future__ import absolute_import, division, print_function

from .commands import insert_resource, insert_datum, retrieve
from .retrieve import register_handler
Add register_handler with the API.
API: Add register_handler with the API.
Python
bsd-3-clause
ericdill/fileStore,ericdill/databroker,ericdill/databroker,danielballan/filestore,ericdill/fileStore,stuwilkins/filestore,danielballan/filestore,NSLS-II/filestore,stuwilkins/filestore,tacaswell/filestore
--- +++ @@ -1,3 +1,4 @@
 from __future__ import absolute_import, division, print_function
 
 from .commands import insert_resource, insert_datum, retrieve
+from .retrieve import register_handler
86fdcd6575a944a378a9c3f5b292fb33a6c42853
digestive/hash.py
digestive/hash.py
import hashlib from digestive.io import Sink class HashDigest(Sink): def __init__(self, name, digest): super().__init__(name) self._digest = digest def update(self, data): self._digest.update(data) def digest(self): return self._digest.hexdigest() class MD5(HashDigest): def __init__(self): super().__init__('md5', hashlib.md5()) class SHA1(HashDigest): def __init__(self): super().__init__('sha1', hashlib.sha1()) class SHA256(HashDigest): def __init__(self): super().__init__('sha2-256', hashlib.sha256()) class SHA512(HashDigest): def __init__(self): super().__init__('sha2-512', hashlib.sha512())
import hashlib from digestive.io import Sink class HashDigest(Sink): def __init__(self, name, digest): super().__init__(name) self._digest = digest def update(self, data): self._digest.update(data) def digest(self): return self._digest.hexdigest() class MD5(HashDigest): def __init__(self): super().__init__('md5', hashlib.md5()) class SHA1(HashDigest): def __init__(self): super().__init__('sha1', hashlib.sha1()) class SHA256(HashDigest): def __init__(self): super().__init__('sha256', hashlib.sha256()) class SHA512(HashDigest): def __init__(self): super().__init__('sha512', hashlib.sha512())
Make sha256 and sha512 sink names correspond to their commandline arguments
Make sha256 and sha512 sink names correspond to their commandline arguments
Python
isc
akaIDIOT/Digestive
--- +++ @@ -27,9 +27,9 @@ class SHA256(HashDigest): def __init__(self): - super().__init__('sha2-256', hashlib.sha256()) + super().__init__('sha256', hashlib.sha256()) class SHA512(HashDigest): def __init__(self): - super().__init__('sha2-512', hashlib.sha512()) + super().__init__('sha512', hashlib.sha512())
dcd6d830033914a0ccf26822d6f305c084b90987
f8a_jobs/defaults.py
f8a_jobs/defaults.py
#!/usr/bin/env python3
import os
from datetime import timedelta

_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))

DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
GITHUB_CONSUMER_KEY = os.getenv('GITHUB_CONSUMER_KEY', 'not-set')
GITHUB_CONSUMER_SECRET = os.getenv('GITHUB_CONSUMER_SECRET', 'not-set')
GITHUB_ACCESS_TOKENS = os.getenv('GITHUB_ACCESS_TOKENS', '').split(',')
APP_SECRET_KEY = os.getenv('APP_SECRET_KEY', 'not-set')
AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY')
AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1')
DEPLOYMENT_PREFIX = os.getenv('AWS_SQS_REGION', 'us-east-1')

# keep disabled authentication by default
DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true')
#!/usr/bin/env python3
import os
from datetime import timedelta

_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))

DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
GITHUB_CONSUMER_KEY = os.getenv('GITHUB_CONSUMER_KEY', 'not-set')
GITHUB_CONSUMER_SECRET = os.getenv('GITHUB_CONSUMER_SECRET', 'not-set')
GITHUB_ACCESS_TOKENS = os.getenv('GITHUB_ACCESS_TOKENS', '').split(',')
APP_SECRET_KEY = os.getenv('APP_SECRET_KEY', 'not-set')
AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY')
AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1')
DEPLOYMENT_PREFIX = os.getenv('DEPLOYMENT_PREFIX', os.getenv('USER'))

# keep disabled authentication by default
DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true')
Fix wrong variable reference in configuration
Fix wrong variable reference in configuration
Python
apache-2.0
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
--- +++ @@ -16,7 +16,7 @@
 AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID')
 AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY')
 AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1')
-DEPLOYMENT_PREFIX = os.getenv('AWS_SQS_REGION', 'us-east-1')
+DEPLOYMENT_PREFIX = os.getenv('DEPLOYMENT_PREFIX', os.getenv('USER'))
 
 # keep disabled authentication by default
 DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true')
25e5b39113994769c01bf6a79a9ca65764861ab3
spicedham/__init__.py
spicedham/__init__.py
from pkg_resources import iter_entry_points from spicedham.config import config # TODO: Wrap all of this in an object with this in an __init__ function plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in plugins: value = plugin.classify(tag, classification_data) # Skip plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
from pkg_resources import iter_entry_points from spicedham.config import load_config _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ if _plugins == None load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
Fix code which executes on module load.
Fix code which executes on module load.
Python
mpl-2.0
mozilla/spicedham,mozilla/spicedham
--- +++ @@ -1,19 +1,26 @@ from pkg_resources import iter_entry_points -from spicedham.config import config +from spicedham.config import load_config -# TODO: Wrap all of this in an object with this in an __init__ function -plugins = [] -for plugin in iter_entry_points(group='spicedham.classifiers', name=None): - pluginClass = plugin.load() - plugins.append(pluginClass()) +_plugins = None + +def load_plugins(): + """ + If not already loaded, load plugins. + """ + if _plugins == None + load_config() + _plugins = [] + for plugin in iter_entry_points(group='spicedham.classifiers', name=None): + pluginClass = plugin.load() + _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ - for plugin in plugins: + for plugin in _plugins: plugin.train(tag, training_data, is_spam) @@ -23,13 +30,13 @@ """ average_score = 0 total = 0 - for plugin in plugins: + for plugin in _plugins: value = plugin.classify(tag, classification_data) - # Skip plugins which give a score of None + # Skip _plugins which give a score of None if value != None: total += 1 average_score += value - # On rare occasions no plugins will give scores. If so, return 0 + # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else:
6d23a879a40b9e94f1c568dc6f97e42001b4203c
vcspull/__about__.py
vcspull/__about__.py
__title__ = 'vcspull'
__package_name__ = 'vcspull'
__description__ = 'synchronize your repos'
__version__ = '1.0.3'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2016 Tony Narlock'
__title__ = 'vcspull'
__package_name__ = 'vcspull'
__description__ = 'synchronize your repos'
__version__ = '1.0.3'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2016 Tony Narlock'
Update LICENSE in package metadata
Update LICENSE in package metadata
Python
mit
tony/vcspull,tony/vcspull
--- +++ @@ -4,5 +4,5 @@
 __version__ = '1.0.3'
 __author__ = 'Tony Narlock'
 __email__ = 'tony@git-pull.com'
-__license__ = 'BSD'
+__license__ = 'MIT'
 __copyright__ = 'Copyright 2013-2016 Tony Narlock'
8e76b64fa3eafa9d22fc37b68ddb1daff6633119
thinglang/parser/tokens/functions.py
thinglang/parser/tokens/functions.py
from thinglang.lexer.symbols.base import LexicalIdentifier from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) self.target = [x.value for x in slice if isinstance(x, LexicalIdentifier)] def describe(self): return '.'.join(self.target) class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) self.target, self.arguments = slice self.value = self if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1]
from thinglang.lexer.symbols.base import LexicalIdentifier, LexicalAccess from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) self.target = [x for x in slice if not isinstance(x, LexicalAccess)] def describe(self): return '.'.join(str(x) for x in self.target) class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) self.target, self.arguments = slice self.value = self if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1]
Use original LexicalIDs in Access
Use original LexicalIDs in Access
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
--- +++ @@ -1,4 +1,4 @@ -from thinglang.lexer.symbols.base import LexicalIdentifier +from thinglang.lexer.symbols.base import LexicalIdentifier, LexicalAccess from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType @@ -7,10 +7,10 @@ class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) - self.target = [x.value for x in slice if isinstance(x, LexicalIdentifier)] + self.target = [x for x in slice if not isinstance(x, LexicalAccess)] def describe(self): - return '.'.join(self.target) + return '.'.join(str(x) for x in self.target) class ArgumentListPartial(ListInitializationPartial):
22e90cf883fa0b6d4c8acb282ebe28929f6d9487
nhs/patents/models.py
nhs/patents/models.py
from django.db import models
from nhs.prescriptions.models import Product


class Patent(models.Model):
    drug = models.ForeignKey(Product)
    expiry_date = models.DateField()
    start_date = models.DateField(null=True, blank=True)
    # Stupid. But you know, they're called patent numbers.
    # Except they have letters in them.
    number = models.CharField(max_length=200, null=True, blank=True)
from django.db import models
from nhs.prescriptions.models import Product


class Patent(models.Model):
    drug = models.ForeignKey(Product)
    expiry_date = models.DateField()
    start_date = models.DateField(null=True, blank=True)
    # Stupid. But you know, they're called patent numbers.
    # Except they have letters in them.
    number = models.CharField(max_length=200, null=True, blank=True)
    source = models.CharField(max_length=200, null=True, blank=True)
Add that field in the model
Add that field in the model
Python
agpl-3.0
openhealthcare/open-prescribing,openhealthcare/open-prescribing,openhealthcare/open-prescribing
--- +++ @@ -9,3 +9,4 @@
     # Stupid. But you know, they're called patent numbers.
     # Except they have letters in them.
     number = models.CharField(max_length=200, null=True, blank=True)
+    source = models.CharField(max_length=200, null=True, blank=True)
c74b444f75441a6ccf9d9305f956ec4443f6ec01
dockci/session.py
dockci/session.py
""" Session interface switcher """ from flask.sessions import SecureCookieSessionInterface, SessionMixin from dockci.util import is_api_request class FakeSession(dict, SessionMixin): """ Transient session-like object """ pass class SessionSwitchInterface(SecureCookieSessionInterface): """ Session interface that uses ``SecureCookieSessionInterface`` methods, unless there's no session cookie and it's an API request """ def __init__(self, app): self.app = app def open_session(self, app, request): session_id = request.cookies.get(self.app.session_cookie_name) if not session_id and is_api_request(request): return FakeSession() return super(SessionSwitchInterface, self).open_session( app, request, ) def save_session(self, app, session, response): if isinstance(session, FakeSession): return return super(SessionSwitchInterface, self).save_session( app, session, response, )
""" Session interface switcher """ from flask.sessions import SecureCookieSessionInterface, SessionMixin from dockci.util import is_api_request class FakeSession(dict, SessionMixin): """ Transient session-like object """ pass class SessionSwitchInterface(SecureCookieSessionInterface): """ Session interface that uses ``SecureCookieSessionInterface`` methods, unless there's no session cookie and it's an API request """ def __init__(self, app): self.app = app def open_session(self, app, request): session_id = request.cookies.get(self.app.session_cookie_name) if not session_id and is_api_request(): return FakeSession() return super(SessionSwitchInterface, self).open_session( app, request, ) def save_session(self, app, session, response): if isinstance(session, FakeSession): return return super(SessionSwitchInterface, self).save_session( app, session, response, )
Use global request context, because None url_rule sometimes
Use global request context, because None url_rule sometimes
Python
isc
sprucedev/DockCI-Agent,sprucedev/DockCI,sprucedev/DockCI,sprucedev/DockCI,RickyCook/DockCI,sprucedev/DockCI,RickyCook/DockCI,sprucedev/DockCI-Agent,RickyCook/DockCI,RickyCook/DockCI
--- +++ @@ -22,7 +22,7 @@ def open_session(self, app, request): session_id = request.cookies.get(self.app.session_cookie_name) - if not session_id and is_api_request(request): + if not session_id and is_api_request(): return FakeSession() return super(SessionSwitchInterface, self).open_session(
5597c9db9067ce466697f75949d47d7f94077fae
tests/test_forms.py
tests/test_forms.py
from django.forms import ModelForm, RadioSelect from .models import Product, Option class ProductForm(ModelForm): class Meta: model = Product class OptionForm(ModelForm): class Meta: model = Option def test_post(rf): req = rf.post('/', {'price': '2.12'}) form = ProductForm(req.POST) obj = form.save(commit=False) assert obj.price == 2.12 def test_edit(): p = Product(price=2.34) form = ProductForm(instance=p) html = form['price'].as_widget() assert 'type="number"' in html assert 'step="0.01"' in html assert '2.34' in html assert '$2.34' not in html def test_select(): form = OptionForm(instance=Option(price=0.5)) html = form['price'].as_widget() assert html.startswith('<select') assert 'selected' in html def test_radio(): class RadioForm(ModelForm): class Meta: model = Option widgets = { 'price': RadioSelect } form = RadioForm(instance=Option(price=0.5)) html = form['price'].as_widget() assert 'radio' in html assert 'checked' in html def test_edit_null(): form = OptionForm() print str(form)
from django.forms import ModelForm, RadioSelect from .models import Product, Option class ProductForm(ModelForm): class Meta: model = Product class OptionForm(ModelForm): class Meta: model = Option def test_post(rf): req = rf.post('/', {'price': '2.12'}) form = ProductForm(req.POST) obj = form.save(commit=False) assert obj.price == 2.12 def test_edit(): p = Product(price=2.34) form = ProductForm(instance=p) html = form['price'].as_widget() assert 'type="number"' in html assert 'step="0.01"' in html assert '2.34' in html assert '$2.34' not in html def test_select(): form = OptionForm(instance=Option(price=0.5)) html = form['price'].as_widget() assert html.startswith('<select') assert 'value="0.50"' in html assert 'selected' in html def test_radio(): class RadioForm(ModelForm): class Meta: model = Option widgets = { 'price': RadioSelect } form = RadioForm(instance=Option(price=0.5)) html = form['price'].as_widget() assert 'radio' in html assert 'value="0.50"' in html assert 'checked' in html def test_edit_null(): form = OptionForm() print str(form)
Add tests for proper values in select/radios
Add tests for proper values in select/radios
Python
bsd-2-clause
Suor/django-easymoney
--- +++ @@ -35,6 +35,7 @@ html = form['price'].as_widget() assert html.startswith('<select') + assert 'value="0.50"' in html assert 'selected' in html @@ -50,6 +51,7 @@ html = form['price'].as_widget() assert 'radio' in html + assert 'value="0.50"' in html assert 'checked' in html
b4f17dfd004cf0033e1aeccbb9e75a07bbe35cfa
competition_scripts/interop/tra.py
competition_scripts/interop/tra.py
import argparse
from time import time

try:
    # Python 3
    from xmlrpc.client import ServerProxy
except ImportError:
    # Python 2
    from SimpleXMLRPCServer import ServerProxy

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='AUVSI SUAS TRA')
    parser.add_argument(
        '--url',
        dest='url',
        help='Interoperability Client URL, example: http://10.10.130.10:80',
        required=True)
    cmd_args = parser.parse_args()

    print("[*] Starting Target Recognition Application...")

    try:
        print('[*] Use Control-C to exit')
    except KeyboardInterrupt:
        print('Exiting')
import argparse
from time import time

try:
    # Python 3
    from xmlrpc.client import ServerProxy
except ImportError:
    # Python 2
    from SimpleXMLRPCServer import ServerProxy

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='AUVSI SUAS TRA')
    parser.add_argument(
        '--url',
        dest='url',
        help='Interoperability Client URL, example: http://10.10.130.10:80',
        required=True)
    cmd_args = parser.parse_args()

    print("[*] Starting Target Recognition Application...")

    server = ServerProxy(cmd_args.url)
    print('Server Info: {}'.format(server.server_info()))

    try:
        print('[*] Use Control-C to exit')
    except KeyboardInterrupt:
        print('Exiting')
Add initialization for ServerProxy object
Add initialization for ServerProxy object
Python
mit
FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition
--- +++ @@ -20,6 +20,9 @@
 
     print("[*] Starting Target Recognition Application...")
 
+    server = ServerProxy(cmd_args.url)
+    print('Server Info: {}'.format(server.server_info()))
+
     try:
         print('[*] Use Control-C to exit')
     except KeyboardInterrupt:
d6da05f79d62f90d8d03908197a0389b67535aa5
halfedge_mesh.py
halfedge_mesh.py
class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] def read_off(self, filename): class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. Args: x: x-coordinate of the point y: y-coordinate of the point z: z-coordinate of the point index: integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh()
class HalfedgeMesh: def __init__(self, filename=None): """Make an empty halfedge mesh.""" self.vertices = [] self.halfedges = [] self.facets = [] def parse_off(self, filename): """Parses OFF files and returns a set of vertices, halfedges, and facets. """ pass def get_halfedge(self, u, v): """Retrieve halfedge with starting vertex u and target vertex v u - starting vertex v - target vertex Returns a halfedge """ pass class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. x - x-coordinate of the point y - y-coordinate of the point z - z-coordinate of the point index - integer id of this vertex """ pass def halfedges(self): """Return a list of halfedges targeting to this vertex.""" pass class Facet: def __init__(self, index): """Create a facet with the given index.""" pass def halfedges(self): """Return halfedges going ccw around this facet.""" pass class Halfedge: def __init__(self, index): """Create a halfedge with given index.""" pass def opposite(self): """Return the opposite halfedge.""" pass def next(self): """Return the opposite halfedge.""" pass def prev(self): """Return the opposite halfedge.""" pass def vertex(self): """Return the target vertex.""" pass def facet(self): """Return the incident facet.""" pass if __name__ == '__main__': m = HalfedgeMesh()
Add parse_off stub and change docstring
Add parse_off stub and change docstring I follow the TomDoc format for docstrings.
Python
mit
carlosrojas/halfedge_mesh
--- +++ @@ -5,17 +5,30 @@ self.halfedges = [] self.facets = [] - def read_off(self, filename): + def parse_off(self, filename): + """Parses OFF files and returns a set of vertices, halfedges, and + facets. + """ + pass + + def get_halfedge(self, u, v): + """Retrieve halfedge with starting vertex u and target vertex v + + u - starting vertex + v - target vertex + + Returns a halfedge + """ + pass class Vertex: def __init__(self, x, y, z, index): """Create a vertex with given index at given point. - Args: - x: x-coordinate of the point - y: y-coordinate of the point - z: z-coordinate of the point - index: integer id of this vertex + x - x-coordinate of the point + y - y-coordinate of the point + z - z-coordinate of the point + index - integer id of this vertex """ pass
65c55a6a4920d67b10d705909864124776d3a2dc
plugins/generic/syntax.py
plugins/generic/syntax.py
#!/usr/bin/env python """ Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import re from lib.core.exception import SqlmapUndefinedMethod class Syntax: """ This class defines generic syntax functionalities for plugins. """ def __init__(self): pass @staticmethod def _escape(expression, quote=True, escaper=None): retVal = expression if quote: for item in re.findall(r"'[^']+'", expression, re.S): retVal = retVal.replace(item, escaper(item[1:-1])) else: retVal = escaper(expression) return retVal @staticmethod def escape(expression, quote=True): errMsg = "'escape' method must be defined " errMsg += "inside the specific DBMS plugin" raise SqlmapUndefinedMethod(errMsg)
#!/usr/bin/env python """ Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import re from lib.core.exception import SqlmapUndefinedMethod class Syntax: """ This class defines generic syntax functionalities for plugins. """ def __init__(self): pass @staticmethod def _escape(expression, quote=True, escaper=None): retVal = expression if quote: for item in re.findall(r"'[^']*'+", expression, re.S): retVal = retVal.replace(item, escaper(item[1:-1])) else: retVal = escaper(expression) return retVal @staticmethod def escape(expression, quote=True): errMsg = "'escape' method must be defined " errMsg += "inside the specific DBMS plugin" raise SqlmapUndefinedMethod(errMsg)
Fix for escaping single quote character(s)
Fix for escaping single quote character(s)
Python
mit
dtrip/.ubuntu,RexGene/monsu-server,dtrip/.ubuntu,RexGene/monsu-server
--- +++ @@ -22,7 +22,7 @@ retVal = expression if quote: - for item in re.findall(r"'[^']+'", expression, re.S): + for item in re.findall(r"'[^']*'+", expression, re.S): retVal = retVal.replace(item, escaper(item[1:-1])) else: retVal = escaper(expression)
a1a9852f478258b2c2f6f28a0ee28f223adaa299
src/myarchive/db/tables/ljtables.py
src/myarchive/db/tables/ljtables.py
from sqlalchemy import ( LargeBinary, Boolean, Column, Integer, String, PickleType, ForeignKey) from sqlalchemy.orm import backref, relationship from sqlalchemy.orm.exc import NoResultFound from myarchive.db.tables.base import Base from myarchive.db.tables.file import TrackedFile from myarchive.db.tables.association_tables import ( at_tweet_tag, at_tweet_file, at_twuser_file) class LJJournal(Base): """Class representing a raw tweet.""" pass
from sqlalchemy import ( Column, Integer, String, TIMESTAMP, ForeignKey) from sqlalchemy.orm import backref, relationship from sqlalchemy.orm.exc import NoResultFound from myarchive.db.tables.base import Base from myarchive.db.tables.file import TrackedFile class LJHost(Base): """Class representing a user retrieved from a LJ-like service.""" __tablename__ = 'lj_hosts' id = Column(Integer, index=True, primary_key=True) url = Column(String) def __init__(self, url): self.url = url class LJUser(Base): """Class representing a user retrieved from a LJ-like service.""" __tablename__ = 'lj_users' id = Column(Integer, index=True, primary_key=True) username = Column(String) host_id = Column(Integer, ForeignKey("lj_hosts.id")) def __init__(self, user_id, username): self.id = user_id self.username = username class LJEntries(Base): """Class representing an entry retrieved from a LJ-like service.""" __tablename__ = 'lj_entries' id = Column(Integer, index=True, primary_key=True) # itemid is unique only to the user, possibly only to the pull... itemid = Column(Integer) eventtime = Column(TIMESTAMP) subject = Column(String) text = Column(String) current_music = Column(String) user_id = Column(Integer, ForeignKey("lj_users.id")) def __init__(self, itemid, eventtime, subject, text, current_music): self.itemid = itemid self.eventtime = eventtime self.subject = subject self.text = text self.current_music = current_music # props["taglist"] # props["current_music"] class LJComments(Base): """Class representing a comment retrieved from a LJ-like service.""" __tablename__ = 'lj_comments' id = Column(Integer, index=True, primary_key=True) body = Column(String) date = Column(TIMESTAMP) parent_id = Column(Integer, ForeignKey("lj_comments.id")) entry_id = Column(Integer, ForeignKey("lj_entries.id"))
Add a bunch of LJ tables
Add a bunch of LJ tables
Python
mit
zetasyanthis/myarchive
--- +++ @@ -1,14 +1,69 @@ from sqlalchemy import ( - LargeBinary, Boolean, Column, Integer, String, PickleType, ForeignKey) + Column, Integer, String, TIMESTAMP, ForeignKey) from sqlalchemy.orm import backref, relationship from sqlalchemy.orm.exc import NoResultFound from myarchive.db.tables.base import Base from myarchive.db.tables.file import TrackedFile -from myarchive.db.tables.association_tables import ( - at_tweet_tag, at_tweet_file, at_twuser_file) -class LJJournal(Base): - """Class representing a raw tweet.""" - pass +class LJHost(Base): + """Class representing a user retrieved from a LJ-like service.""" + + __tablename__ = 'lj_hosts' + + id = Column(Integer, index=True, primary_key=True) + url = Column(String) + + def __init__(self, url): + self.url = url + + +class LJUser(Base): + """Class representing a user retrieved from a LJ-like service.""" + + __tablename__ = 'lj_users' + + id = Column(Integer, index=True, primary_key=True) + username = Column(String) + host_id = Column(Integer, ForeignKey("lj_hosts.id")) + + def __init__(self, user_id, username): + self.id = user_id + self.username = username + + +class LJEntries(Base): + """Class representing an entry retrieved from a LJ-like service.""" + + __tablename__ = 'lj_entries' + + id = Column(Integer, index=True, primary_key=True) + # itemid is unique only to the user, possibly only to the pull... + itemid = Column(Integer) + eventtime = Column(TIMESTAMP) + subject = Column(String) + text = Column(String) + current_music = Column(String) + user_id = Column(Integer, ForeignKey("lj_users.id")) + + def __init__(self, itemid, eventtime, subject, text, current_music): + self.itemid = itemid + self.eventtime = eventtime + self.subject = subject + self.text = text + self.current_music = current_music + # props["taglist"] + # props["current_music"] + + +class LJComments(Base): + """Class representing a comment retrieved from a LJ-like service.""" + + __tablename__ = 'lj_comments' + + id = Column(Integer, index=True, primary_key=True) + body = Column(String) + date = Column(TIMESTAMP) + parent_id = Column(Integer, ForeignKey("lj_comments.id")) + entry_id = Column(Integer, ForeignKey("lj_entries.id"))
13350cdf5598ac0ed55e5404cf6d407300b4c1ac
apps/home/forms.py
apps/home/forms.py
# -*- coding: utf-8 -*-
import re

from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _


class CreateChatForm(forms.Form):
    pass

class JoinChatForm(forms.Form):
    chat_token = forms.CharField(required=True, max_length=24, label='')
    chat_token.widget = forms.TextInput({"maxlength": 24,
                                         "pattern": "[a-z0-9]{24}",
                                         "placeholder": _("please enter your code here..."),
                                         "class": "chat-token"})

    user_token = False

    def clean_chat_token(self):
        """
        Validate chat token
        """
        new_chat_token = self.cleaned_data['chat_token']
        match = re.search(r'[a-z0-9]{24}', new_chat_token)
        if not match:
            raise forms.ValidationError(_('Invalid code.'))

        self.user_token = Chats.join_to_chat(new_chat_token)
        if not self.user_token:
            raise forms.ValidationError(_('Invalid code.'))
# -*- coding: utf-8 -*-
import re

from django import forms
from apps.chat.models import Chats
from django.utils.translation import ugettext as _


class CreateChatForm(forms.Form):
    pass

class JoinChatForm(forms.Form):
    chat_token = forms.CharField(required=True, max_length=24, label='')
    chat_token.widget = forms.TextInput({"maxlength": 24,
                                         "pattern": "[a-z0-9]{24}",
                                         "autocomplete": "off",
                                         "placeholder": _("please enter your code here..."),
                                         "class": "chat-token"})

    user_token = False

    def clean_chat_token(self):
        """
        Validate chat token
        """
        new_chat_token = self.cleaned_data['chat_token']
        match = re.search(r'[a-z0-9]{24}', new_chat_token)
        if not match:
            raise forms.ValidationError(_('Invalid code.'))

        self.user_token = Chats.join_to_chat(new_chat_token)
        if not self.user_token:
            raise forms.ValidationError(_('Invalid code.'))
Set autocomplete off for chat token form field
Set autocomplete off for chat token form field
Python
bsd-3-clause
MySmile/sfchat,MySmile/sfchat,MySmile/sfchat,MySmile/sfchat
--- +++ @@ -13,6 +13,7 @@
     chat_token = forms.CharField(required=True, max_length=24, label='')
     chat_token.widget = forms.TextInput({"maxlength": 24,
                                          "pattern": "[a-z0-9]{24}",
+                                         "autocomplete": "off",
                                          "placeholder": _("please enter your code here..."),
                                          "class": "chat-token"})
 
e8da237a6c1542b997b061db43cc993942983b10
django_local_apps/management/commands/local_app_utils/db_clean_utils.py
django_local_apps/management/commands/local_app_utils/db_clean_utils.py
from django.db.models import Q
from django.utils import timezone


def remove_expired_record(expire_days, query_set, time_attr_name="timestamp"):
    expired_record_filter = {"%s__lt" % time_attr_name: timezone.now() - timezone.timedelta(days=expire_days)}
    q = Q(**expired_record_filter)
    final_q = query_set.filter(q)
    # cnt = 0
    # for i in final_q:
    # i.delete()
    # cnt += 1
    # if cnt % 1000 == 0:
    # print "%d deleted" % cnt
    final_q.delete()
from django.db.models import Q
from django.utils import timezone


def remove_expired_record(expire_days, query_set, time_attr_name="timestamp"):
    expired_record_filter = {"%s__lt" % time_attr_name: timezone.now() - timezone.timedelta(days=expire_days)}
    q = Q(**expired_record_filter)
    final_q = query_set.filter(q)
    if final_q.count() > 1000:
        final_q = final_q[:999]
    # cnt = 0
    # for i in final_q:
    # i.delete()
    # cnt += 1
    # if cnt % 1000 == 0:
    # print "%d deleted" % cnt
    final_q.delete()
Delete item with limited size.
Delete item with limited size.
Python
bsd-3-clause
weijia/django-local-apps,weijia/django-local-apps
--- +++ @@ -6,6 +6,8 @@
     expired_record_filter = {"%s__lt" % time_attr_name: timezone.now() - timezone.timedelta(days=expire_days)}
     q = Q(**expired_record_filter)
     final_q = query_set.filter(q)
+    if final_q.count() > 1000:
+        final_q = final_q[:999]
     # cnt = 0
     # for i in final_q:
     # i.delete()
ce3948b2aacddfb9debd4834d9aa446e99987a0d
app/views.py
app/views.py
from app import mulungwishi_app as url
from flask import render_template


@url.route('/')
def index():
    return render_template('index.html')


@url.route('/<query>')
def print_user_input(query):
    if '=' in query:
        query_container, query_value = query.split('=')
        return 'Your query is {} which is equal to {}'.format(query_container, query_value)
    return "You've entered an incorrect query. Please check and try again. Input : "+query


@url.errorhandler(404)
def page_not_found(error):
    return render_template('404.html'), 404


@url.errorhandler(403)
def page_forbidden(error):
    return render_template('403.html', title='Page Forbidden'), 403


@url.errorhandler(500)
def page_server_error(error):
    return render_template('500.html', title='Server Error'), 500
from app import mulungwishi_app as url
from flask import render_template


@url.route('/')
def index():
    return render_template('index.html')


@url.route('/<query>')
def print_user_input(query):
    if '=' in query:
        query_container, query_value = query.split('=')
        return 'Your query is {} which is equal to {}'.format(query_container, query_value)
    return "You've entered an incorrect query. Please check and try again. Input : {}".format(query)


@url.errorhandler(404)
def page_not_found(error):
    return render_template('404.html'), 404


@url.errorhandler(403)
def page_forbidden(error):
    return render_template('403.html', title='Page Forbidden'), 403


@url.errorhandler(500)
def page_server_error(error):
    return render_template('500.html', title='Server Error'), 500
Replace string concatenation with .format function
Replace string concatenation with .format function
Python
mit
admiral96/mulungwishi-webhook,engagespark/public-webhooks,admiral96/public-webhooks,admiral96/mulungwishi-webhook,admiral96/public-webhooks,engagespark/mulungwishi-webhook,engagespark/mulungwishi-webhook,engagespark/public-webhooks
--- +++ @@ -12,7 +12,7 @@
     if '=' in query:
         query_container, query_value = query.split('=')
         return 'Your query is {} which is equal to {}'.format(query_container, query_value)
-    return "You've entered an incorrect query. Please check and try again. Input : "+query
+    return "You've entered an incorrect query. Please check and try again. Input : {}".format(query)
 
 
 @url.errorhandler(404)
98aea2115cb6c5101379be2320a6fb0735a32490
helenae/web/views.py
helenae/web/views.py
from flask import render_template from flask_app import app @app.route('/') def index(): return render_template('index.html')
# -*- coding: utf-8 -*- import datetime from hashlib import sha256 from time import gmtime, strftime import sqlalchemy from flask import render_template, redirect, url_for from flask_app import app, db_connection, dbTables from forms import RegisterForm @app.route('/', methods=('GET', 'POST')) def index(): return render_template('index.html', title=u"Главная") @app.route('/success', methods=('GET', 'POST')) def success(): return render_template('success.html', title=u"Регистрация завершена!") @app.route('/sign-up', methods=('GET', 'POST')) def sign_up(): form = RegisterForm() if form.validate_on_submit(): # new catalog for user catalog_name = str(form.data['login'] + "_main") new_dir = dbTables.Catalog(catalog_name) db_connection.session.add(new_dir) db_connection.session.commit() # new filespace for user fs_name = str(form.data['login'] + "_fs") new_fs = dbTables.FileSpace(fs_name, new_dir) db_connection.session.add(new_fs) db_connection.session.commit() fs = db_connection.session.execute(sqlalchemy.select([dbTables.FileSpace]).where(dbTables.FileSpace.storage_name == fs_name)) fs = fs.fetchone() time_is = datetime.datetime.strptime(strftime("%d.%m.%Y", gmtime()), "%d.%m.%Y").date() time_is = time_is + datetime.timedelta(days=365) date_max = time_is.strftime("%d.%m.%Y") id_new = db_connection.session.execute(sqlalchemy.func.count(dbTables.Users.id)).fetchone()[0] + 1 password_hash = str(sha256(form.data['password']+str(id_new)).hexdigest()) # create new user new_user = dbTables.Users(form.data['login'], form.data['fullname'], password_hash, form.data['email'], date_max, 1, 2, fs.id) db_connection.session.add(new_user) db_connection.session.commit() return redirect(url_for('success')) return render_template('sign-up.html', title=u"Регистрация", form=form) @app.route('/sign-in', methods=('GET', 'POST')) def sign_in(): return render_template('sign-in.html', title=u"Аутентификация") @app.route('/forgot-password', methods=('GET', 'POST')) def forgot_password(): return render_template('forgot-password.html', title=u"Восстановление доступа")
Add routes for other pages
Add routes for other pages
Python
mit
Relrin/Helenae,Relrin/Helenae,Relrin/Helenae
--- +++ @@ -1,7 +1,54 @@ -from flask import render_template -from flask_app import app +# -*- coding: utf-8 -*- +import datetime +from hashlib import sha256 +from time import gmtime, strftime -@app.route('/') +import sqlalchemy +from flask import render_template, redirect, url_for +from flask_app import app, db_connection, dbTables +from forms import RegisterForm + + +@app.route('/', methods=('GET', 'POST')) def index(): - return render_template('index.html') + return render_template('index.html', title=u"Главная") +@app.route('/success', methods=('GET', 'POST')) +def success(): + return render_template('success.html', title=u"Регистрация завершена!") + +@app.route('/sign-up', methods=('GET', 'POST')) +def sign_up(): + form = RegisterForm() + if form.validate_on_submit(): + # new catalog for user + catalog_name = str(form.data['login'] + "_main") + new_dir = dbTables.Catalog(catalog_name) + db_connection.session.add(new_dir) + db_connection.session.commit() + # new filespace for user + fs_name = str(form.data['login'] + "_fs") + new_fs = dbTables.FileSpace(fs_name, new_dir) + db_connection.session.add(new_fs) + db_connection.session.commit() + fs = db_connection.session.execute(sqlalchemy.select([dbTables.FileSpace]).where(dbTables.FileSpace.storage_name == fs_name)) + fs = fs.fetchone() + time_is = datetime.datetime.strptime(strftime("%d.%m.%Y", gmtime()), "%d.%m.%Y").date() + time_is = time_is + datetime.timedelta(days=365) + date_max = time_is.strftime("%d.%m.%Y") + id_new = db_connection.session.execute(sqlalchemy.func.count(dbTables.Users.id)).fetchone()[0] + 1 + password_hash = str(sha256(form.data['password']+str(id_new)).hexdigest()) + # create new user + new_user = dbTables.Users(form.data['login'], form.data['fullname'], password_hash, form.data['email'], date_max, 1, 2, fs.id) + db_connection.session.add(new_user) + db_connection.session.commit() + return redirect(url_for('success')) + return render_template('sign-up.html', title=u"Регистрация", form=form) + +@app.route('/sign-in', methods=('GET', 'POST')) +def sign_in(): + return render_template('sign-in.html', title=u"Аутентификация") + +@app.route('/forgot-password', methods=('GET', 'POST')) +def forgot_password(): + return render_template('forgot-password.html', title=u"Восстановление доступа")
2aed0c4089eced430dabf8d63c732e6b0013f540
project_fish/whats_fresh/models.py
project_fish/whats_fresh/models.py
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField


class Image(models.Model):
    """
    The Image model holds an image and related data.

    The Created and Modified time fields are created automatically by
    Django when the object is created or modified, and can not be altered.
    This model uses Django's built-ins for holding the image location
    and data in the database, as well as for keeping created and modified
    timestamps.
    """

    image = models.ImageField(upload_to='%Y/%m/%d')

    caption = models.TextField()
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)


class Vendor(models.Model):
    """
    The Vendor model holds the information for a vendor, including the
    geographic location as a pair of latitudinal/logitudinal coordinates,
    a street address, and an optional text description of their location
    (in case the address/coordinates are of, say, a dock instead of a shop).
    """
    pass
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField


class Image(models.Model):
    """
    The Image model holds an image and related data.

    The Created and Modified time fields are created automatically by
    Django when the object is created or modified, and can not be altered.
    This model uses Django's built-ins for holding the image location
    and data in the database, as well as for keeping created and modified
    timestamps.
    """

    image = models.ImageField(upload_to='/')

    caption = models.TextField()
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)


class Vendor(models.Model):
    """
    The Vendor model holds the information for a vendor, including the
    geographic location as a pair of latitudinal/logitudinal coordinates,
    a street address, and an optional text description of their location
    (in case the address/coordinates are of, say, a dock instead of a shop).
    """
    pass
Change upload path for images
Change upload path for images
Python
apache-2.0
osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
--- +++ @@ -14,7 +14,7 @@ timestamps. """ - image = models.ImageField(upload_to='%Y/%m/%d') + image = models.ImageField(upload_to='/') caption = models.TextField() created = models.DateTimeField(auto_now_add=True)
0ee0d94e6a167ab8994a5e61f3db45799eba7a12
dragonflow/common/common_params.py
dragonflow/common/common_params.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from neutron.i18n import _ df_opts = [ cfg.StrOpt('remote_db_ip', default='127.0.0.1', help=_('The remote db server ip address')), cfg.IntOpt('remote_db_port', default=4001, help=_('The remote db server port')), cfg.StrOpt('nb_db_class', default='dragonflow.db.drivers.etcd_db_driver.EtcdDbDriver', help=_('The driver class for the NB DB driver')), cfg.StrOpt('local_ip', default='127.0.0.1', help=_('Local host IP')), cfg.StrOpt('tunnel_type', default='geneve', help=_('The encapsulation type for the tunnel')), cfg.StrOpt('apps_list', default='l2_app.L2App,l3_app.L3App', help=_('List of openflow applications classes to load')), ]
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from neutron.i18n import _ df_opts = [ cfg.StrOpt('remote_db_ip', default='127.0.0.1', help=_('The remote db server ip address')), cfg.PortOpt('remote_db_port', default=4001, help=_('The remote db server port')), cfg.StrOpt('nb_db_class', default='dragonflow.db.drivers.etcd_db_driver.EtcdDbDriver', help=_('The driver class for the NB DB driver')), cfg.StrOpt('local_ip', default='127.0.0.1', help=_('Local host IP')), cfg.StrOpt('tunnel_type', default='geneve', help=_('The encapsulation type for the tunnel')), cfg.StrOpt('apps_list', default='l2_app.L2App,l3_app.L3App', help=_('List of openflow applications classes to load')), ]
Use oslo_config's new PortOpt type for port options
Use oslo_config's new PortOpt type for port options The oslo_config library now provides a PortOpt type to validate the port range. Change-Id: Ifbfee642309fec668e363555c2abd103c1f8c4af ref: https://github.com/openstack/oslo.config/blob/2.6.0/oslo_config/cfg.py#L1114 Depends-On: Ida294b05a85f5bef587b761fcd03c28c7a3474d8
Python
apache-2.0
FrankDuan/df_code,openstack/dragonflow,FrankDuan/df_code,openstack/dragonflow,FrankDuan/df_code,openstack/dragonflow
--- +++ @@ -18,9 +18,9 @@ cfg.StrOpt('remote_db_ip', default='127.0.0.1', help=_('The remote db server ip address')), - cfg.IntOpt('remote_db_port', - default=4001, - help=_('The remote db server port')), + cfg.PortOpt('remote_db_port', + default=4001, + help=_('The remote db server port')), cfg.StrOpt('nb_db_class', default='dragonflow.db.drivers.etcd_db_driver.EtcdDbDriver', help=_('The driver class for the NB DB driver')),
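For context on the PortOpt change above, here is a minimal, self-contained sketch (not part of dragonflow or of this commit) of how a cfg.PortOpt is registered and parsed with oslo.config. It assumes oslo.config >= 2.6.0, where PortOpt was introduced; the throwaway ConfigOpts instance and the reuse of the option name are purely illustrative.

from oslo_config import cfg

# Illustration only: a local ConfigOpts instance, not dragonflow's global CONF.
CONF = cfg.ConfigOpts()
CONF.register_cli_opts([
    cfg.PortOpt('remote_db_port',
                default=4001,
                help='The remote db server port'),
])

# Values inside 0-65535 parse normally; an out-of-range value such as
# --remote_db_port=70000 is rejected by the Port type during parsing.
CONF(['--remote_db_port', '4001'])
print(CONF.remote_db_port)  # 4001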
590f80bc382cdbec97e2cb3e7b92c79bb7fa89cc
dyndns/models.py
dyndns/models.py
from django.db import models class Record(models.Model): domain_id = models.IntegerField() name = models.CharField(max_length=30) type=models.CharField(max_length=6) content=models.CharField(max_length=30) ttl=models.IntegerField() prio=models.IntegerField() change_date= models.IntegerField() def __unicode__(self): return self.content def __unicode__(self): return self.name class Meta: db_table = 'records'
from django.db import models class Domain(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(unique=True, max_length=255) master = models.CharField(max_length=128) last_check = models.IntegerField() type = models.CharField(max_length=6) notified_serial = models.IntegerField() account = models.CharField(max_length=40) def __unicode__(self): return self.name class Meta: db_table = u'domains' class Record(models.Model): id = models.IntegerField(primary_key=True) domain = models.ForeignKey(Domains) name = models.CharField(max_length=255) type = models.CharField(max_length=6) content = models.CharField(max_length=255) ttl = models.IntegerField() prio = models.IntegerField() change_date = models.IntegerField() def __unicode__(self): return self.name class Meta: db_table = u'records' class Supermaster(models.Model): ip = models.CharField(max_length=25) nameserver = models.CharField(max_length=255) account = models.CharField(max_length=40) class Meta: db_table = u'supermasters'
Update model to match latest PDNS and add the other two tables
Update model to match latest PDNS and add the other two tables
Python
bsd-2-clause
zefciu/django-powerdns-dnssec,allegro/django-powerdns-dnssec,dominikkowalski/django-powerdns-dnssec,dominikkowalski/django-powerdns-dnssec,dominikkowalski/django-powerdns-dnssec,allegro/django-powerdns-dnssec,zefciu/django-powerdns-dnssec,dominikkowalski/django-powerdns-dnssec,zefciu/django-powerdns-dnssec,allegro/django-powerdns-dnssec,allegro/django-powerdns-dnssec,dominikkowalski/django-powerdns-dnssec
--- +++ @@ -1,18 +1,36 @@ from django.db import models -class Record(models.Model): - domain_id = models.IntegerField() - name = models.CharField(max_length=30) - type=models.CharField(max_length=6) - content=models.CharField(max_length=30) - ttl=models.IntegerField() - prio=models.IntegerField() - change_date= models.IntegerField() - def __unicode__(self): - return self.content - def __unicode__(self): - return self.name - class Meta: - db_table = 'records' +class Domain(models.Model): + id = models.IntegerField(primary_key=True) + name = models.CharField(unique=True, max_length=255) + master = models.CharField(max_length=128) + last_check = models.IntegerField() + type = models.CharField(max_length=6) + notified_serial = models.IntegerField() + account = models.CharField(max_length=40) + def __unicode__(self): + return self.name + class Meta: + db_table = u'domains' +class Record(models.Model): + id = models.IntegerField(primary_key=True) + domain = models.ForeignKey(Domains) + name = models.CharField(max_length=255) + type = models.CharField(max_length=6) + content = models.CharField(max_length=255) + ttl = models.IntegerField() + prio = models.IntegerField() + change_date = models.IntegerField() + def __unicode__(self): + return self.name + class Meta: + db_table = u'records' +class Supermaster(models.Model): + ip = models.CharField(max_length=25) + nameserver = models.CharField(max_length=255) + account = models.CharField(max_length=40) + class Meta: + db_table = u'supermasters' +
454e107abfdc9e3038a18500568e9a1357364bd0
pygraphc/similarity/JaroWinkler.py
pygraphc/similarity/JaroWinkler.py
import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') return jellyfish.jaro_winkler(string1, string2) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) if distance > 0: distance_with_id = (unique_event_id[0], unique_event_id[1], distance) return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') distance = jellyfish.jaro_winkler(string1, string2) if distance > 0.: return round(distance, 3) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) distance_with_id = (unique_event_id[0], unique_event_id[1], distance) return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
Add checking for zero distance
Add checking for zero distance
Python
mit
studiawan/pygraphc
--- +++ @@ -11,13 +11,14 @@ def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') - return jellyfish.jaro_winkler(string1, string2) + distance = jellyfish.jaro_winkler(string1, string2) + if distance > 0.: + return round(distance, 3) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) - if distance > 0: - distance_with_id = (unique_event_id[0], unique_event_id[1], distance) - return distance_with_id + distance_with_id = (unique_event_id[0], unique_event_id[1], distance) + return distance_with_id def get_jarowinkler(self): # get unique event id combination
39d7ec0fe9fdbdd152dfcc2d4280b784f6315886
stardate/urls/index_urls.py
stardate/urls/index_urls.py
from django.conf.urls import include, url from django.views import generic from stardate.models import Blog from stardate.views import ( BlogCreate, select_backend, process_webhook, ) urlpatterns = [ url(r'^new/$', select_backend, name='blog-new'), url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'), url(r'^providers/$', select_backend, name='provider-select'), url(r'^webhook/$', process_webhook, name='webhook'), url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'), ]
from django.conf.urls import include, url from django.views import generic from stardate.models import Blog from stardate.views import ( BlogCreate, select_backend, process_webhook, ) urlpatterns = [ url(r'^new/$', select_backend, name='blog-new'), url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'), url(r'^providers/$', select_backend, name='provider-select'), url(r'^webhook/$', process_webhook, name='webhook'), url(r'^(?P<blog_slug>[-\w]+)/', include('stardate.urls.blog_urls')), url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'), ]
Revert "remove blog urls from index urls"
Revert "remove blog urls from index urls" This reverts commit f8d5541a8e5de124dcec62a32bd19a8226869622.
Python
bsd-3-clause
blturner/django-stardate,blturner/django-stardate
--- +++ @@ -14,5 +14,6 @@ url(r'^create/(?P<provider>[-\w]+)/$', BlogCreate.as_view(), name='blog-create'), url(r'^providers/$', select_backend, name='provider-select'), url(r'^webhook/$', process_webhook, name='webhook'), + url(r'^(?P<blog_slug>[-\w]+)/', include('stardate.urls.blog_urls')), url(r'^$', generic.ListView.as_view(model=Blog), name='blog-list'), ]
93d0f11658c7417371ec2e040397c7a572559585
django_remote_submission/consumers.py
django_remote_submission/consumers.py
"""Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job @channel_session_user_from_http def ws_connect(message): message.reply_channel.send({ 'accept': True, }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
"""Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job import json @channel_session_user_from_http def ws_connect(message): last_jobs = message.user.jobs.order_by('-modified')[:10] for job in last_jobs: message.reply_channel.send({ 'text': json.dumps({ 'job_id': job.id, 'title': job.title, 'status': job.status, }), }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
Send last jobs on initial connection
Send last jobs on initial connection
Python
isc
ornl-ndav/django-remote-submission,ornl-ndav/django-remote-submission,ornl-ndav/django-remote-submission
--- +++ @@ -7,12 +7,21 @@ from .models import Job +import json + @channel_session_user_from_http def ws_connect(message): - message.reply_channel.send({ - 'accept': True, - }) + last_jobs = message.user.jobs.order_by('-modified')[:10] + + for job in last_jobs: + message.reply_channel.send({ + 'text': json.dumps({ + 'job_id': job.id, + 'title': job.title, + 'status': job.status, + }), + }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel,
099c41acc83fdd6589f5a53ec8d1615d96e4d188
website/articles.py
website/articles.py
import sys from tinydb import TinyDB, where import markdown2 articles_db = TinyDB("data/articles.json") def get_all_articles(): return articles_db.all() def get_article_data_by_url(url: str): results = articles_db.search(where("url") == url) if len(results) == 0: raise FileNotFoundError("Error: no article with url " + url) elif len(results) == 1: return results[0] else: # TODO handle multiple results case return results[0] def get_article_html(url: str): info = get_article_data_by_url(url) html = None if "html" in info: html = get_contents(info["html"]) elif "markdown" in info: markdown = get_contents(info["markdown"]) html = markdown2.markdown(markdown) return html def get_contents(filename: str): contents = None with open("data/articles/" + filename) as file: contents = file.read() return contents
import sys from tinydb import TinyDB, where import markdown2 articles_db = TinyDB("data/articles.json") def get_all_articles(): return articles_db.all() def get_article_data_by_url(url: str): results = articles_db.search(where("url") == url) if len(results) == 0: raise FileNotFoundError("Error: no article with url " + url) elif len(results) == 1: return results[0] else: # TODO handle multiple results case return results[0] def get_article_html(url: str): info = get_article_data_by_url(url) html = None if "html" in info: html = get_contents(info["html"]) elif "markdown" in info: markdown = get_contents(info["markdown"]) html = markdown2.markdown(markdown) return html def get_contents(filename: str): contents = None with open("data/articles/" + filename, "r", encoding="utf8") as file: contents = file.read() return contents
Set file open to enforce utf8
Set file open to enforce utf8
Python
apache-2.0
timlyo/personalWebsite,timlyo/timlyo.github.io,timlyo/timlyo.github.io,timlyo/personalWebsite,timlyo/personalWebsite,timlyo/timlyo.github.io
--- +++ @@ -33,6 +33,6 @@ def get_contents(filename: str): contents = None - with open("data/articles/" + filename) as file: + with open("data/articles/" + filename, "r", encoding="utf8") as file: contents = file.read() return contents
22472d7947c16388dc849b2c317ee4d509322754
docs/examples/01_make_resourcelist.py
docs/examples/01_make_resourcelist.py
from resync.resource_list import ResourceList from resync.resource import Resource from resync.sitemap import Sitemap rl = ResourceList() rl.add( Resource('http://example.com/res1', lastmod='2013-01-01') ) rl.add( Resource('http://example.com/res2', lastmod='2013-01-02') ) sm = Sitemap(pretty_xml=True) print sm.resources_as_xml(rl)
from resync.resource_list import ResourceList from resync.resource import Resource from resync.sitemap import Sitemap rl = ResourceList() rl.add( Resource('http://example.com/res1', lastmod='2013-01-01') ) rl.add( Resource('http://example.com/res2', lastmod='2013-01-02') ) print rl.as_xml(pretty_xml=True)
Update to use ResourceList methods
Update to use ResourceList methods
Python
apache-2.0
dans-er/resync,lindareijnhoudt/resync,lindareijnhoudt/resync,dans-er/resync,resync/resync
--- +++ @@ -5,5 +5,4 @@ rl = ResourceList() rl.add( Resource('http://example.com/res1', lastmod='2013-01-01') ) rl.add( Resource('http://example.com/res2', lastmod='2013-01-02') ) -sm = Sitemap(pretty_xml=True) -print sm.resources_as_xml(rl) +print rl.as_xml(pretty_xml=True)
e82225201772794bf347c6e768d25f24a61b9b54
migrations/schematic_settings.py
migrations/schematic_settings.py
import sys import os # This only works if you're running schematic from the zamboni root. sys.path.insert(0, os.path.realpath('.')) # Set up zamboni. import manage from django.conf import settings config = settings.DATABASES['default'] config['HOST'] = config.get('HOST', 'localhost') config['PORT'] = config.get('PORT', '3306') if config['HOST'].endswith('.sock'): """ Oh you meant 'localhost'! """ config['HOST'] = 'localhost' s = 'mysql --silent {NAME} -h{HOST} -P{PORT} -u{USER}' if config['PASSWORD']: s += ' -p{PASSWORD}' else: del config['PASSWORD'] db = s.format(**config) table = 'schema_version'
import sys import os sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # Set up zamboni. import manage from django.conf import settings config = settings.DATABASES['default'] config['HOST'] = config.get('HOST', 'localhost') config['PORT'] = config.get('PORT', '3306') if not config['HOST'] or config['HOST'].endswith('.sock'): """ Oh you meant 'localhost'! """ config['HOST'] = 'localhost' s = 'mysql --silent {NAME} -h{HOST} -u{USER}' if config['PASSWORD']: s += ' -p{PASSWORD}' else: del config['PASSWORD'] if config['PORT']: s += ' -P{PORT}' else: del config['PORT'] db = s.format(**config) table = 'schema_version'
Make the settings work when there's no port, and fix up the path manipulation
Make the settings work when there's no port, and fix up the path manipulation
Python
bsd-3-clause
kumar303/zamboni,kmaglione/olympia,Prashant-Surya/addons-server,jamesthechamp/zamboni,yfdyh000/olympia,aviarypl/mozilla-l10n-addons-server,Joergen/zamboni,muffinresearch/addons-server,Jobava/zamboni,koehlermichael/olympia,clouserw/zamboni,kmaglione/olympia,mstriemer/addons-server,psiinon/addons-server,mozilla/addons-server,lavish205/olympia,Nolski/olympia,spasovski/zamboni,jasonthomas/zamboni,jpetto/olympia,kumar303/olympia,beni55/olympia,crdoconnor/olympia,crdoconnor/olympia,andymckay/olympia,andymckay/zamboni,clouserw/zamboni,luckylavish/zamboni,Nolski/olympia,kmaglione/olympia,SuriyaaKudoIsc/olympia,atiqueahmedziad/addons-server,Joergen/olympia,tsl143/zamboni,elysium001/zamboni,eviljeff/zamboni,andymckay/olympia,wagnerand/olympia,mozilla/zamboni,shahbaz17/zamboni,kumar303/zamboni,Jobava/zamboni,jbalogh/zamboni,Hitechverma/zamboni,magopian/olympia,atiqueahmedziad/addons-server,Joergen/olympia,johancz/olympia,lavish205/olympia,psiinon/addons-server,jamesthechamp/zamboni,johancz/olympia,crdoconnor/olympia,mdaif/olympia,shahbaz17/zamboni,kumar303/olympia,SuriyaaKudoIsc/olympia,atiqueahmedziad/addons-server,mozilla/olympia,mozilla/olympia,jasonthomas/zamboni,robhudson/zamboni,clouserw/zamboni,andymckay/zamboni,jamesthechamp/zamboni,mudithkr/zamboni,ingenioustechie/zamboni,robhudson/zamboni,kmaglione/olympia,shahbaz17/zamboni,jpetto/olympia,harry-7/addons-server,washort/zamboni,jpetto/olympia,wagnerand/zamboni,mstriemer/zamboni,jasonthomas/zamboni,spasovski/zamboni,diox/zamboni,washort/zamboni,mstriemer/addons-server,diox/zamboni,elysium001/zamboni,shahbaz17/zamboni,ayushagrawal288/zamboni,Nolski/olympia,luckylavish/zamboni,koehlermichael/olympia,eviljeff/zamboni,andymckay/addons-server,eviljeff/zamboni,bqbn/addons-server,mstriemer/zamboni,mozilla/zamboni,mdaif/olympia,Hitechverma/zamboni,Revanth47/addons-server,diox/olympia,mudithkr/zamboni,ddurst/zamboni,anaran/olympia,ingenioustechie/zamboni,tsl143/zamboni,Joergen/zamboni,tsl143/zamboni,diox/olympia,yfdyh000/olympia,magopian/olympia,mdaif/olympia,kumar303/zamboni,jpetto/olympia,Joergen/zamboni,yfdyh000/olympia,ayushagrawal288/zamboni,kumar303/olympia,wagnerand/addons-server,mdaif/olympia,muffinresearch/olympia,harry-7/addons-server,kmaglione/olympia,elysium001/zamboni,jamesthechamp/zamboni,koehlermichael/olympia,psiinon/addons-server,johancz/olympia,yfdyh000/olympia,mudithkr/zamboni,eviljeff/olympia,mrrrgn/olympia,ingenioustechie/zamboni,anaran/olympia,wagnerand/olympia,bqbn/addons-server,wagnerand/addons-server,kumar303/olympia,eviljeff/zamboni,aviarypl/mozilla-l10n-addons-server,Revanth47/addons-server,harry-7/addons-server,mstriemer/addons-server,wagnerand/zamboni,harikishen/addons-server,mozilla/addons-server,Joergen/olympia,kumar303/addons-server,eviljeff/olympia,jbalogh/zamboni,Hitechverma/zamboni,mdaif/olympia,mstriemer/olympia,harikishen/addons-server,koehlermichael/olympia,muffinresearch/olympia,bqbn/addons-server,muffinresearch/addons-server,ngokevin/zamboni,wagnerand/zamboni,robhudson/zamboni,Prashant-Surya/addons-server,ngokevin/zamboni,washort/zamboni,Nolski/olympia,ddurst/zamboni,elysium001/zamboni,spasovski/zamboni,Prashant-Surya/addons-server,Joergen/zamboni,Prashant-Surya/addons-server,Joergen/zamboni,jbalogh/zamboni,kumar303/addons-server,andymckay/addons-server,Witia1/olympia,wagnerand/addons-server,mozilla/olympia,eviljeff/olympia,jbalogh/zamboni,kumar303/zamboni,diox/olympia,Witia1/olympia,wagnerand/zamboni,magopian/olympia,Joergen/zamboni,wagnerand/olympia,kumar303/addons-server,andymckay/addons-server,Witia1/olympia,spasovski/zamboni,wagnerand/olympia,mstriemer/olympia,yfdyh000/olympia,mrrrgn/olympia,anaran/olympia,beni55/olympia,jasonthomas/zamboni,mudithkr/zamboni,washort/zamboni,Witia1/olympia,Jobava/zamboni,beni55/olympia,muffinresearch/olympia,mrrrgn/olympia,crdoconnor/olympia,mrrrgn/olympia,kumar303/addons-server,tsl143/addons-server,magopian/olympia,andymckay/zamboni,johancz/olympia,lavish205/olympia,tsl143/addons-server,ngokevin/zamboni,mozilla/addons-server,aviarypl/mozilla-l10n-addons-server,mozilla/zamboni,Witia1/olympia,Jobava/zamboni,beni55/olympia,muffinresearch/olympia,mrrrgn/olympia,mstriemer/olympia,Joergen/olympia,muffinresearch/addons-server,wagnerand/addons-server,Revanth47/addons-server,SuriyaaKudoIsc/olympia,beni55/olympia,diox/zamboni,Witia1/olympia,andymckay/addons-server,mstriemer/zamboni,mozilla/zamboni,mdaif/olympia,Hitechverma/zamboni,Revanth47/addons-server,diox/olympia,mudithkr/zamboni,ddurst/zamboni,anaran/olympia,ingenioustechie/zamboni,tsl143/zamboni,Joergen/zamboni,tsl143/zamboni,diox/olympia,yfdyh000/olympia,magopian/olympia,mdaif/olympia,kumar303/zamboni,jpetto/olympia,Joergen/zamboni,yfdyh000/olympia,ayushagrawal288/zamboni,kumar303/olympia,wagnerand/addons-server,mdaif/olympia,muffinresearch/olympia,harry-7/addons-server,kmaglione/olympia,elysium001/zamboni,jamesthechamp/zamboni,koehlermichael/olympia,psiinon/addons-server,johancz/olympia,yfdyh000/olympia,mudithkr/zamboni,eviljeff/olympia,mrrrgn/olympia,ingenioustechie/zamboni,anaran/olympia,wagnerand/olympia,bqbn/addons-server,wagnerand/addons-server,kumar303/olympia,eviljeff/zamboni,aviarypl/mozilla-l10n-addons-server,Revanth47/addons-server,harry-7/addons-server,mstriemer/addons-server,wagnerand/zamboni,harikishen/addons-server,mozilla/addons-server,Joergen/olympia,kumar303/addons-server,eviljeff/olympia,jbalogh/zamboni,Hitechverma/zamboni,mdaif/olympia,mstriemer/olympia,harikishen/addons-server,koehlermichael/olympia,muffinresearch/olympia,bqbn/addons-server,muffinresearch/addons-server
--- +++ @@ -1,8 +1,7 @@ import sys import os -# This only works if you're running schematic from the zamboni root. -sys.path.insert(0, os.path.realpath('.')) +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # Set up zamboni. import manage @@ -12,16 +11,20 @@ config['HOST'] = config.get('HOST', 'localhost') config['PORT'] = config.get('PORT', '3306') -if config['HOST'].endswith('.sock'): +if not config['HOST'] or config['HOST'].endswith('.sock'): """ Oh you meant 'localhost'! """ config['HOST'] = 'localhost' -s = 'mysql --silent {NAME} -h{HOST} -P{PORT} -u{USER}' +s = 'mysql --silent {NAME} -h{HOST} -u{USER}' if config['PASSWORD']: s += ' -p{PASSWORD}' else: del config['PASSWORD'] +if config['PORT']: + s += ' -P{PORT}' +else: + del config['PORT'] db = s.format(**config) table = 'schema_version'
e53e489da4e9f53e371997449bc813def2600008
opps/contrib/notifications/models.py
opps/contrib/notifications/models.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import json from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.core.models import Publishable from opps.db import Db NOTIFICATION_TYPE = ( (u'json', _(u'JSON')), (u'text', _(u'Text')), (u'html', _(u'HTML')), ) class Notification(Publishable): container = models.ForeignKey('containers.Container') action = models.CharField(_('Action'), max_length=75, default="message") type = models.CharField(_('Type'), max_length=10, choices=NOTIFICATION_TYPE, default='json') message = models.TextField(_('Message')) def save(self, *args, **kwargs): super(Notification, self).save(*args, **kwargs) _db = Db(self.container.get_absolute_url(), self.container.id) message = self.message if self.type == "json": message = json.dumps(self.message) _db.publish(json.dumps({ "action": self.action, "id": self.id, "published": self.published, "date": self.date_available.strftime("%D %T"), "message": message}))
#!/usr/bin/env python # -*- coding: utf-8 -*- import json from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.core.models import Publishable from opps.db import Db NOTIFICATION_TYPE = ( (u'json', _(u'JSON')), (u'text', _(u'Text')), (u'html', _(u'HTML')), ) class Notification(Publishable): container = models.ForeignKey('containers.Container') action = models.CharField(_('Action'), max_length=75, default="message") type = models.CharField(_('Type'), max_length=10, choices=NOTIFICATION_TYPE, default='json') message = models.TextField(_('Message')) def save(self, *args, **kwargs): super(Notification, self).save(*args, **kwargs) _db = Db(self.container.get_absolute_url(), self.container.id) message = self.message if self.type == "json": message = json.dumps(self.message) _db.publish(json.dumps({ "action": self.action, "id": self.id, "published": self.published, "date": self.date_available.strftime("%D %T"), "message": message})) def get_absolute_url(self): return u"/{}/{}.server".format(self.container.channel_long_slug, self.container.slug)
Add get_absolute_url on notification model
Add get_absolute_url on notification model
Python
mit
YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps
--- +++ @@ -37,3 +37,7 @@ "published": self.published, "date": self.date_available.strftime("%D %T"), "message": message})) + + def get_absolute_url(self): + return u"/{}/{}.server".format(self.container.channel_long_slug, + self.container.slug)
eccda634d3233cd4f8aaeea372735731fd674c29
pysis/labels/__init__.py
pysis/labels/__init__.py
import io import functools import warnings import six from .decoder import LabelDecoder from .encoder import LabelEncoder def load(stream): """Parse an isis label from a stream. :param stream: a ``.read()``-supporting file-like object containing a label. if ``stream`` is a string it will be treated as a filename """ if isinstance(stream, six.string_types): with open(stream, 'rb') as fp: return LabelDecoder().decode(fp) return LabelDecoder().decode(stream) def loads(data, encoding='utf-8'): """Parse an isis label from a string. :param data: an isis label as a string :returns: a dictionary representation of the given isis label """ if not isinstance(data, bytes): data = data.encode(encoding) return LabelDecoder().decode(data) def dump(label, stream): LabelEncoder().encode(label, stream) def dumps(label): stream = io.BytesIO() LabelEncoder().encode(label, stream) return stream.getvalue() @functools.wraps(load) def parse_file_label(stream): warnings.warn('parse_file_label is deprecated. use load instead.') return load(stream) @functools.wraps(loads) def parse_label(data, encoding='utf-8'): warnings.warn('parse_label is deprecated. use load instead.') return loads(data, encoding='utf-8')
import io import warnings import six from .decoder import LabelDecoder from .encoder import LabelEncoder def load(stream): """Parse an isis label from a stream. :param stream: a ``.read()``-supporting file-like object containing a label. if ``stream`` is a string it will be treated as a filename """ if isinstance(stream, six.string_types): with open(stream, 'rb') as fp: return LabelDecoder().decode(fp) return LabelDecoder().decode(stream) def loads(data, encoding='utf-8'): """Parse an isis label from a string. :param data: an isis label as a string :returns: a dictionary representation of the given isis label """ if not isinstance(data, bytes): data = data.encode(encoding) return LabelDecoder().decode(data) def dump(label, stream): LabelEncoder().encode(label, stream) def dumps(label): stream = io.BytesIO() LabelEncoder().encode(label, stream) return stream.getvalue() def parse_file_label(stream): load.__doc__ + """ deprecated:: 0.4.0 Use load instead. """ warnings.warn('parse_file_label is deprecated. use load instead.') return load(stream) def parse_label(data, encoding='utf-8'): loads.__doc__ + """ deprecated:: 0.4.0 Use loads instead. """ warnings.warn('parse_label is deprecated. use loads instead.') return loads(data, encoding='utf-8')
Add deprecation messages to old parse_label methods.
Add deprecation messages to old parse_label methods.
Python
bsd-3-clause
michaelaye/Pysis,wtolson/pysis,wtolson/pysis,michaelaye/Pysis
--- +++ @@ -1,5 +1,4 @@ import io -import functools import warnings import six @@ -41,13 +40,19 @@ return stream.getvalue() -@functools.wraps(load) def parse_file_label(stream): + load.__doc__ + """ + deprecated:: 0.4.0 + Use load instead. + """ warnings.warn('parse_file_label is deprecated. use load instead.') return load(stream) -@functools.wraps(loads) def parse_label(data, encoding='utf-8'): - warnings.warn('parse_label is deprecated. use load instead.') + loads.__doc__ + """ + deprecated:: 0.4.0 + Use loads instead. + """ + warnings.warn('parse_label is deprecated. use loads instead.') return loads(data, encoding='utf-8')
14c7f4295701ffbb5c7cd14062a43cb8c86d57de
qtpy/tests/test_qtsql.py
qtpy/tests/test_qtsql.py
from __future__ import absolute_import import pytest from qtpy import QtSql def test_qtsvg(): """Test the qtpy.QtSql namespace""" assert QtSql.QSqlDatabase is not None # assert QtSql.QSqlDriverCreator is not None assert QtSql.QSqlDriverCreatorBase is not None assert QtSql.QSqlDriver is not None assert QtSql.QSqlDriverPlugin is not None assert QtSql.QSqlError is not None assert QtSql.QSqlField is not None assert QtSql.QSqlIndex is not None assert QtSql.QSqlQuery is not None assert QtSql.QSqlRecord is not None assert QtSql.QSqlResult is not None assert QtSql.QSqlQueryModel is not None assert QtSql.QSqlRelationalDelegate is not None assert QtSql.QSqlRelation is not None assert QtSql.QSqlRelationalTableModel is not None assert QtSql.QSqlTableModel is not None
from __future__ import absolute_import import pytest from qtpy import QtSql def test_qtsvg(): """Test the qtpy.QtSql namespace""" assert QtSql.QSqlDatabase is not None # assert QtSql.QSqlDriverCreator is not None assert QtSql.QSqlDriverCreatorBase is not None assert QtSql.QSqlDriver is not None #assert QtSql.QSqlDriverPlugin is not None assert QtSql.QSqlError is not None assert QtSql.QSqlField is not None assert QtSql.QSqlIndex is not None assert QtSql.QSqlQuery is not None assert QtSql.QSqlRecord is not None assert QtSql.QSqlResult is not None assert QtSql.QSqlQueryModel is not None assert QtSql.QSqlRelationalDelegate is not None assert QtSql.QSqlRelation is not None assert QtSql.QSqlRelationalTableModel is not None assert QtSql.QSqlTableModel is not None
Remove another class (QSqlDriverPlugin) from test
Remove another class (QSqlDriverPlugin) from test
Python
mit
davvid/qtpy,davvid/qtpy,spyder-ide/qtpy,goanpeca/qtpy,goanpeca/qtpy
--- +++ @@ -9,7 +9,7 @@ # assert QtSql.QSqlDriverCreator is not None assert QtSql.QSqlDriverCreatorBase is not None assert QtSql.QSqlDriver is not None - assert QtSql.QSqlDriverPlugin is not None + #assert QtSql.QSqlDriverPlugin is not None assert QtSql.QSqlError is not None assert QtSql.QSqlField is not None assert QtSql.QSqlIndex is not None
0fae7ce68a531b2c27e03a854fba3319d041ee45
mezzanine/twitter/managers.py
mezzanine/twitter/managers.py
from django.db.models import Manager from mezzanine.utils.cache import cache_installed class TweetManager(Manager): """ Manager that handles generating the initial ``Query`` instance for a user, list or search term. """ def get_for(self, user_name=None, list_name=None, search_term=None): """ Create a query and run it for the given arg if it doesn't exist, and return the tweets for the query. """ if user_name is not None: type, value = "user", user_name elif list_name is not None: type, value = "list", list_name elif search_term is not None: type, value = "search", search_term else: return from mezzanine.twitter.models import Query query, created = Query.objects.get_or_create(type=type, value=value) if created or cache_installed(): query.run() elif not query.interested: query.interested = True query.save() return query.tweets.all()
from django.db.models import Manager class TweetManager(Manager): """ Manager that handles generating the initial ``Query`` instance for a user, list or search term. """ def get_for(self, user_name=None, list_name=None, search_term=None): """ Create a query and run it for the given arg if it doesn't exist, and return the tweets for the query. """ if user_name is not None: type, value = "user", user_name elif list_name is not None: type, value = "list", list_name elif search_term is not None: type, value = "search", search_term else: return from mezzanine.twitter.models import Query query, created = Query.objects.get_or_create(type=type, value=value) if created: query.run() elif not query.interested: query.interested = True query.save() return query.tweets.all()
Revert cache changes to Twitter queries - since authenticated users bypass the cache, and the Twitter call will generate a lot of queries.
Revert cache changes to Twitter queries - since authenticated users bypass the cache, and the Twitter call will generate a lot of queries.
Python
bsd-2-clause
viaregio/mezzanine,promil23/mezzanine,biomassives/mezzanine,dekomote/mezzanine-modeltranslation-backport,sjuxax/mezzanine,AlexHill/mezzanine,dustinrb/mezzanine,ZeroXn/mezzanine,theclanks/mezzanine,theclanks/mezzanine,SoLoHiC/mezzanine,guibernardino/mezzanine,dekomote/mezzanine-modeltranslation-backport,emile2016/mezzanine,sjdines/mezzanine,promil23/mezzanine,eino-makitalo/mezzanine,dsanders11/mezzanine,PegasusWang/mezzanine,nikolas/mezzanine,ZeroXn/mezzanine,biomassives/mezzanine,saintbird/mezzanine,cccs-web/mezzanine,jerivas/mezzanine,stbarnabas/mezzanine,Kniyl/mezzanine,gbosh/mezzanine,frankier/mezzanine,douglaskastle/mezzanine,jjz/mezzanine,agepoly/mezzanine,molokov/mezzanine,mush42/mezzanine,saintbird/mezzanine,geodesign/mezzanine,scarcry/snm-mezzanine,industrydive/mezzanine,ryneeverett/mezzanine,molokov/mezzanine,Skytorn86/mezzanine,spookylukey/mezzanine,dustinrb/mezzanine,orlenko/plei,fusionbox/mezzanine,emile2016/mezzanine,geodesign/mezzanine,frankier/mezzanine,dsanders11/mezzanine,SoLoHiC/mezzanine,spookylukey/mezzanine,ryneeverett/mezzanine,dovydas/mezzanine,webounty/mezzanine,Kniyl/mezzanine,SoLoHiC/mezzanine,Cicero-Zhao/mezzanine,dustinrb/mezzanine,ryneeverett/mezzanine,adrian-the-git/mezzanine,fusionbox/mezzanine,gbosh/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,stephenmcd/mezzanine,jjz/mezzanine,molokov/mezzanine,readevalprint/mezzanine,eino-makitalo/mezzanine,sjdines/mezzanine,wyzex/mezzanine,orlenko/sfpirg,ZeroXn/mezzanine,damnfine/mezzanine,Cajoline/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,wrwrwr/mezzanine,wyzex/mezzanine,Cajoline/mezzanine,frankchin/mezzanine,promil23/mezzanine,tuxinhang1989/mezzanine,orlenko/sfpirg,saintbird/mezzanine,agepoly/mezzanine,joshcartme/mezzanine,PegasusWang/mezzanine,christianwgd/mezzanine,webounty/mezzanine,theclanks/mezzanine,sjdines/mezzanine,emile2016/mezzanine,readevalprint/mezzanine,orlenko/plei,Kniyl/mezzanine,gradel/mezzanine,jerivas/mezzanine,geodesign/mezzanine,sjuxax/mezzanine,christianwgd/mezzanine,spookylukey/mezzanine,scarcry/snm-mezzanine,cccs-web/mezzanine,jjz/mezzanine,adrian-the-git/mezzanine,wrwrwr/mezzanine,vladir/mezzanine,AlexHill/mezzanine,wbtuomela/mezzanine,dekomote/mezzanine-modeltranslation-backport,guibernardino/mezzanine,jerivas/mezzanine,scarcry/snm-mezzanine,nikolas/mezzanine,viaregio/mezzanine,frankchin/mezzanine,damnfine/mezzanine,gbosh/mezzanine,dsanders11/mezzanine,joshcartme/mezzanine,Skytorn86/mezzanine,frankier/mezzanine,adrian-the-git/mezzanine,frankchin/mezzanine,readevalprint/mezzanine,Cajoline/mezzanine,douglaskastle/mezzanine,gradel/mezzanine,damnfine/mezzanine,PegasusWang/mezzanine,vladir/mezzanine,wyzex/mezzanine,tuxinhang1989/mezzanine,Cicero-Zhao/mezzanine,batpad/mezzanine,orlenko/plei,eino-makitalo/mezzanine,orlenko/sfpirg,dovydas/mezzanine,stbarnabas/mezzanine,Skytorn86/mezzanine,dovydas/mezzanine,christianwgd/mezzanine,mush42/mezzanine,vladir/mezzanine,douglaskastle/mezzanine,viaregio/mezzanine,webounty/mezzanine,gradel/mezzanine,batpad/mezzanine,sjuxax/mezzanine,industrydive/mezzanine,tuxinhang1989/mezzanine,nikolas/mezzanine,industrydive/mezzanine,joshcartme/mezzanine,agepoly/mezzanine,mush42/mezzanine,biomassives/mezzanine
--- +++ @@ -1,7 +1,5 @@ from django.db.models import Manager - -from mezzanine.utils.cache import cache_installed class TweetManager(Manager): @@ -25,7 +23,7 @@ return from mezzanine.twitter.models import Query query, created = Query.objects.get_or_create(type=type, value=value) - if created or cache_installed(): + if created: query.run() elif not query.interested: query.interested = True
2875a8e6c123d3d4f6039e7864ff66373c51daea
examples/signal_handlers/signal_handlers.py
examples/signal_handlers/signal_handlers.py
# -*- coding: utf-8 -*- from riot.app import quit_app, run_tag from riot.tags.style import parse_style from riot.tags.tags import parse_tag_from_node from riot.tags.utils import convert_string_to_node from riot.virtual_dom import define_tag, mount sig = define_tag('sig', ''' <sig> <filler valign="top"> <pile> <edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" /> <div /> <text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text> <div /> <button id="exit" label="Exit" onclick="{ exit }" /> </pile> </filler> <script> import urwid def init(self, opts): import urwid self.name = opts['name'] def answer(self, edit, text): self.update({'name': text}) def exit(self, button): import urwid raise urwid.ExitMainLoop() </script> </sig> ''') style = ''' .highlight { foreground: default,bold; background: default; mono: bold; } ''' root = convert_string_to_node('<sig></sig>') mount(root, 'sig', 'sig', {'name': 'Default'}) app = parse_tag_from_node(root) run_tag(app, parse_style(style))
# -*- coding: utf-8 -*- from riot.app import quit_app, run_tag from riot.tags.style import parse_style from riot.tags.tags import parse_tag_from_node from riot.tags.utils import convert_string_to_node from riot.virtual_dom import define_tag, mount sig = define_tag('sig', ''' <sig> <filler valign="top"> <pile> <edit caption="What is your name?" class="highlight" id="ask" onchange="{ answer }" /> <div /> <text><span if="{ name }">Nick to meet you, </span><span class="highlight">{ name }</span></text> <div /> <button id="exit" label="Exit" onclick="{ exit }" /> </pile> </filler> <script> import urwid def init(self, opts): import urwid self.name = opts['name'] def answer(self, edit, text): self.name = text </script> </sig> ''') style = ''' .highlight { foreground: default,bold; background: default; mono: bold; } ''' root = convert_string_to_node('<sig></sig>') mount(root, 'sig', 'sig', {'name': 'Default'}) app = parse_tag_from_node(root) run_tag(app, parse_style(style))
Remove useless code in signal handler example.
Remove useless code in signal handler example.
Python
mit
soasme/riotpy
--- +++ @@ -25,14 +25,7 @@ self.name = opts['name'] def answer(self, edit, text): - self.update({'name': text}) - - def exit(self, button): - import urwid - raise urwid.ExitMainLoop() - - - + self.name = text </script> </sig> ''')
79c0071b7aad2992011684428611701bc58a9bff
tests/__init__.py
tests/__init__.py
try: from urllib.parse import urlencode except ImportError: from urllib import urlencode import tornado.testing import tornado.options import celery from flower.app import Flower from flower.urls import handlers from flower.events import Events from flower.urls import settings from flower import command # side effect - define options class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase): def get_app(self): capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=tornado.options.options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app def get(self, url, **kwargs): return self.fetch(url, **kwargs) def post(self, url, **kwargs): if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs)
try: from urllib.parse import urlencode except ImportError: from urllib import urlencode import tornado.testing from tornado.options import options import celery import mock from flower.app import Flower from flower.urls import handlers from flower.events import Events from flower.urls import settings from flower import command # side effect - define options class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase): def get_app(self): capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app def get(self, url, **kwargs): return self.fetch(url, **kwargs) def post(self, url, **kwargs): if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs) def mock_option(self, name, value): return mock.patch.object(options.mockable(), name, value)
Add a util function for mocking options
Add a util function for mocking options
Python
bsd-3-clause
jzhou77/flower,asmodehn/flower,jzhou77/flower,asmodehn/flower,asmodehn/flower,jzhou77/flower
--- +++ @@ -4,9 +4,10 @@ from urllib import urlencode import tornado.testing -import tornado.options +from tornado.options import options import celery +import mock from flower.app import Flower from flower.urls import handlers @@ -20,8 +21,7 @@ capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, - options=tornado.options.options, - handlers=handlers, **settings) + options=options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app @@ -32,3 +32,6 @@ if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs) + + def mock_option(self, name, value): + return mock.patch.object(options.mockable(), name, value)
4aad37d8186fab025ba29050620a929c167ca497
pulsar/locks.py
pulsar/locks.py
try: import lockfile except ImportError: lockfile = None import threading import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, expect suboptimal Pulsar lock handling." class LockManager(): def __init__(self, lockfile=lockfile): if not lockfile: log.info(NO_PYLOCKFILE_MESSAGE) self.job_locks = dict({}) self.job_locks_lock = threading.Lock() self.lockfile = lockfile def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock def free_lock(self, path): # Not needed with pylockfile # Not currently be called, will result in tiny memory leak if # pylockfile is unavailable - so if you process millions of jobs # install pylockfile. if not self.lockfile: with self.job_locks_lock: if path in self.job_locks: del self.job_locks[path]
try: import lockfile except ImportError: lockfile = None import threading import logging log = logging.getLogger(__name__) NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, skipping experimental lockfile handling." class LockManager(): def __init__(self, lockfile=lockfile): if not lockfile: log.info(NO_PYLOCKFILE_MESSAGE) self.job_locks = dict({}) self.job_locks_lock = threading.Lock() self.lockfile = lockfile def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock def free_lock(self, path): # Not needed with pylockfile # Not currently be called, will result in tiny memory leak if # pylockfile is unavailable - so if you process millions of jobs # install pylockfile. if not self.lockfile: with self.job_locks_lock: if path in self.job_locks: del self.job_locks[path]
Fix misleading message about pylockfile.
Fix misleading message about pylockfile.
Python
apache-2.0
ssorgatem/pulsar,natefoo/pulsar,natefoo/pulsar,ssorgatem/pulsar,galaxyproject/pulsar,galaxyproject/pulsar
--- +++ @@ -8,7 +8,7 @@ import logging log = logging.getLogger(__name__) -NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, expect suboptimal Pulsar lock handling." +NO_PYLOCKFILE_MESSAGE = "pylockfile module not found, skipping experimental lockfile handling." class LockManager():
3f848be239d5fc4ae18d598250e90217b86e8fcf
pywikibot/_wbtypes.py
pywikibot/_wbtypes.py
# -*- coding: utf-8 -*- """Wikibase data type classes.""" # # (C) Pywikibot team, 2013-2015 # # Distributed under the terms of the MIT license. # from __future__ import absolute_import, unicode_literals __version__ = '$Id$' # import json from pywikibot.tools import StringTypes class WbRepresentation(object): """Abstract class for Wikibase representations.""" def __init__(self): """Constructor.""" raise NotImplementedError def toWikibase(self): """Convert representation to JSON for the Wikibase API.""" raise NotImplementedError @classmethod def fromWikibase(cls, json): """Create a representation object based on JSON from Wikibase API.""" raise NotImplementedError def __str__(self): return json.dumps(self.toWikibase(), indent=4, sort_keys=True, separators=(',', ': ')) def __repr__(self): assert isinstance(self._items, tuple) assert all(isinstance(item, StringTypes) for item in self._items) values = ((attr, getattr(self, attr)) for attr in self._items) attrs = ', '.join('{0}={1}'.format(attr, value) for attr, value in values) return '{0}({1})'.format(self.__class__.__name__, attrs) def __eq__(self, other): return self.__dict__ == other.__dict__
# -*- coding: utf-8 -*- """Wikibase data type classes.""" # # (C) Pywikibot team, 2013-2015 # # Distributed under the terms of the MIT license. # from __future__ import absolute_import, unicode_literals __version__ = '$Id$' # import json from pywikibot.tools import StringTypes class WbRepresentation(object): """Abstract class for Wikibase representations.""" def __init__(self): """Constructor.""" raise NotImplementedError def toWikibase(self): """Convert representation to JSON for the Wikibase API.""" raise NotImplementedError @classmethod def fromWikibase(cls, json): """Create a representation object based on JSON from Wikibase API.""" raise NotImplementedError def __str__(self): return json.dumps(self.toWikibase(), indent=4, sort_keys=True, separators=(',', ': ')) def __repr__(self): assert isinstance(self._items, tuple) assert all(isinstance(item, StringTypes) for item in self._items) values = ((attr, getattr(self, attr)) for attr in self._items) attrs = ', '.join('{0}={1}'.format(attr, value) for attr, value in values) return '{0}({1})'.format(self.__class__.__name__, attrs) def __eq__(self, other): return self.__dict__ == other.__dict__ def __ne__(self, other): return not self.__eq__(other)
Add missing not-equal comparison for wbtypes
Add missing not-equal comparison for wbtypes Bug: T158848 Change-Id: Ib6e992b7ed1c5b4b8feac205758bdbaebda2b09c
Python
mit
hasteur/g13bot_tools_new,magul/pywikibot-core,jayvdb/pywikibot-core,Darkdadaah/pywikibot-core,happy5214/pywikibot-core,hasteur/g13bot_tools_new,magul/pywikibot-core,happy5214/pywikibot-core,wikimedia/pywikibot-core,npdoty/pywikibot,wikimedia/pywikibot-core,jayvdb/pywikibot-core,npdoty/pywikibot,Darkdadaah/pywikibot-core,PersianWikipedia/pywikibot-core,hasteur/g13bot_tools_new
--- +++ @@ -47,3 +47,6 @@ def __eq__(self, other): return self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not self.__eq__(other)
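For context on why the explicit __ne__ added above matters: on Python 2 (which pywikibot still supported at the time), defining __eq__ alone does not give a matching "!=", because the default not-equal falls back to identity comparison. The following standalone sketch (not pywikibot code) illustrates the pitfall and the delegation pattern used in the commit.

# Runs on Python 2 and 3; the surprising behaviour noted below is Python 2 only.
class EqOnly(object):
    """Defines __eq__ but not __ne__."""

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return self.__dict__ == other.__dict__


class EqAndNe(EqOnly):
    """Adds the matching __ne__, delegating to __eq__."""

    def __ne__(self, other):
        return not self.__eq__(other)


a, b = EqOnly(1), EqOnly(1)
print(a == b)   # True
print(a != b)   # True on Python 2 (identity fallback) -- the bug being avoided

c, d = EqAndNe(1), EqAndNe(1)
print(c != d)   # False once __ne__ delegates to __eq__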
aa5bc77e78e82fbe63acf2fd8f6764a420f2e4e8
simuvex/procedures/stubs/caller.py
simuvex/procedures/stubs/caller.py
import simuvex ###################################### # Caller ###################################### class Caller(simuvex.SimProcedure): """ Caller stub. Creates a Ijk_Call exit to the specified function """ def run(self, target_addr=None): self.call(target_addr, [ ], self.after_call) def after_call(self): pass
import simuvex ###################################### # Caller ###################################### class Caller(simuvex.SimProcedure): """ Caller stub. Creates a Ijk_Call exit to the specified function """ NO_RET = True def run(self, target_addr=None): self.call(target_addr, [ ], self.after_call) def after_call(self): pass
Make sure Caller does not return
Make sure Caller does not return
Python
bsd-2-clause
axt/angr,chubbymaggie/angr,iamahuman/angr,tyb0807/angr,tyb0807/angr,chubbymaggie/angr,schieb/angr,iamahuman/angr,schieb/angr,angr/angr,angr/simuvex,iamahuman/angr,chubbymaggie/angr,axt/angr,angr/angr,chubbymaggie/simuvex,tyb0807/angr,angr/angr,f-prettyland/angr,f-prettyland/angr,axt/angr,schieb/angr,chubbymaggie/simuvex,chubbymaggie/simuvex,f-prettyland/angr
--- +++ @@ -11,6 +11,8 @@ Caller stub. Creates a Ijk_Call exit to the specified function """ + NO_RET = True + def run(self, target_addr=None): self.call(target_addr, [ ], self.after_call)
2da9a6d5f1668bc78995034ba3bb9ed22172e799
tests/test_cli.py
tests/test_cli.py
import unittest import os from EasyEuler.cli import commands from click.testing import CliRunner class CommandLineInterfaceTestCase(unittest.TestCase): def setUp(self): self.runner = CliRunner() class TestGenerateCommand(CommandLineInterfaceTestCase): def test_file_creation(self): with self.runner.isolated_filesystem(): self.runner.invoke(commands, ['generate', '1']) self.runner.invoke(commands, ['generate', '1', 'c']) self.assertTrue(os.path.exists('euler_001.py')) self.assertTrue(os.path.exists('euler_001.c')) def test_invalid_problem_id(self): result = self.runner.invoke(commands, ['generate', '9999']) self.assertEqual(result.exit_code, 1) if __name__ == '__main__': unittest.main()
import unittest import os from EasyEuler.cli import commands from click.testing import CliRunner class CommandLineInterfaceTestCase(unittest.TestCase): def setUp(self): self.runner = CliRunner() class TestGenerateCommand(CommandLineInterfaceTestCase): def test_file_creation(self): with self.runner.isolated_filesystem(): self.runner.invoke(commands, ['generate', '1']) self.runner.invoke(commands, ['generate', '1', 'c']) self.assertTrue(os.path.exists('euler_001.py')) self.assertTrue(os.path.exists('euler_001.c')) def test_invalid_problem_id(self): result = self.runner.invoke(commands, ['generate', '9999']) self.assertEqual(result.exit_code, 2) if __name__ == '__main__': unittest.main()
Fix invalid problem ID test
Fix invalid problem ID test
Python
mit
Encrylize/EasyEuler
--- +++ @@ -22,7 +22,7 @@ def test_invalid_problem_id(self): result = self.runner.invoke(commands, ['generate', '9999']) - self.assertEqual(result.exit_code, 1) + self.assertEqual(result.exit_code, 2) if __name__ == '__main__':
5b156e3927f2168d9df36f5771c91807704c6dc9
tests/adjust_unittesting_config_for_ci.py
tests/adjust_unittesting_config_for_ci.py
from os.path import abspath, dirname, join import json if __name__ == '__main__': file = abspath(join(dirname(__file__), '..', 'unittesting.json')) with open(file, 'w') as fp: config = { "deferred": True, "verbosity": 0, "capture_console": True, "failfast": True, "reload_package_on_testing": False, "start_coverage_after_reload": False, "show_reload_progress": False, "output": None, "generate_html_report": False } json.dump(config, fp, indent=4)
from os.path import abspath, dirname, join import json if __name__ == '__main__': file = abspath(join(dirname(__file__), '..', 'unittesting.json')) with open(file, 'w') as fp: config = { "deferred": True, "verbosity": 2, "capture_console": True, "failfast": True, "reload_package_on_testing": False, "start_coverage_after_reload": False, "show_reload_progress": False, "output": None, "generate_html_report": False } json.dump(config, fp, indent=4)
Set verbosity to 2 for the tests because otherwise the CI script thinks we're hanging
Set verbosity to 2 for the tests because otherwise the CI script thinks we're hanging
Python
mit
tomv564/LSP
--- +++ @@ -7,7 +7,7 @@ with open(file, 'w') as fp: config = { "deferred": True, - "verbosity": 0, + "verbosity": 2, "capture_console": True, "failfast": True, "reload_package_on_testing": False,
18a8a9e90ab0dd31ca1ee147f85d16d0cc7e6bc1
api/__init__.py
api/__init__.py
from api.models import BaseTag TAGS = { 'fairness': { 'color': '#bcf0ff', 'description': 'Fairness is ideas of justice, rights, and autonomy.', }, 'cheating': { 'color': '#feffbc', 'description': 'Cheating is acting dishonestly or unfairly in order to gain an advantage.', }, 'loyalty': { 'color': '#bcffe2', 'description': 'Loyalty underlies virtues of patriotism and self-sacrifice for the group.', }, 'betrayal': { 'color': '#ffe5bc', 'description': 'Betrayal is disloyalty and the destruction of trust.', }, 'care': { 'color': '#bcc1ff', 'description': 'Care is concern for the well-being of others.', }, 'harm': { 'color': '#ffbcf5', 'description': 'Harm is something that causes someone or something to be hurt, broken, made less valuable or successful, etc.', }, 'authority': { 'color': '#ffb29e', 'description': 'Authority underlies virtues of leadership and followership, including deference to legitimate authority and respect for traditions.', }, 'subversion': { 'color' :'#e7bcff', 'description': 'Subversion is the undermining of the power and authority of an established system or institution.', }, 'sanctity': { 'color': '#d6ffbc', 'description': 'Sanctity underlies notions of striving to live in an elevated, less carnal, more noble way.', }, 'degradation': { 'color': '#ffbcd1', 'description': 'Degradation is the process in which the beauty or quality of something is destroyed or spoiled', }, 'morality': { 'color' : '#c1bfc0', 'description': 'Morality is a particular system of values and principles of conduct.', }, }; def populate_base_tags(tags): for tag in tags: BaseTag.objects.get_or_create( name=tag, color=tags[tag]["color"], description=tags[tag]["description"] ) print "Base tags created!" populate_base_tags(TAGS)
Add script to populate Base Tags on app startup
Add script to populate Base Tags on app startup
Python
mit
haystack/eyebrowse-server,haystack/eyebrowse-server,haystack/eyebrowse-server,haystack/eyebrowse-server,haystack/eyebrowse-server
--- +++ @@ -0,0 +1,60 @@ +from api.models import BaseTag + +TAGS = { + 'fairness': { + 'color': '#bcf0ff', + 'description': 'Fairness is ideas of justice, rights, and autonomy.', + }, + 'cheating': { + 'color': '#feffbc', + 'description': 'Cheating is acting dishonestly or unfairly in order to gain an advantage.', + }, + 'loyalty': { + 'color': '#bcffe2', + 'description': 'Loyalty underlies virtues of patriotism and self-sacrifice for the group.', + }, + 'betrayal': { + 'color': '#ffe5bc', + 'description': 'Betrayal is disloyalty and the destruction of trust.', + }, + 'care': { + 'color': '#bcc1ff', + 'description': 'Care is concern for the well-being of others.', + }, + 'harm': { + 'color': '#ffbcf5', + 'description': 'Harm is something that causes someone or something to be hurt, broken, made less valuable or successful, etc.', + }, + 'authority': { + 'color': '#ffb29e', + 'description': 'Authority underlies virtues of leadership and followership, including deference to legitimate authority and respect for traditions.', + }, + 'subversion': { + 'color' :'#e7bcff', + 'description': 'Subversion is the undermining of the power and authority of an established system or institution.', + }, + 'sanctity': { + 'color': '#d6ffbc', + 'description': 'Sanctity underlies notions of striving to live in an elevated, less carnal, more noble way.', + }, + 'degradation': { + 'color': '#ffbcd1', + 'description': 'Degradation is the process in which the beauty or quality of something is destroyed or spoiled', + }, + 'morality': { + 'color' : '#c1bfc0', + 'description': 'Morality is a particular system of values and principles of conduct.', + }, +}; + +def populate_base_tags(tags): + for tag in tags: + BaseTag.objects.get_or_create( + name=tag, + color=tags[tag]["color"], + description=tags[tag]["description"] + ) + + print "Base tags created!" + +populate_base_tags(TAGS)
47348a032bf86aac563dca41703f1e39d03b2360
aplpy/header.py
aplpy/header.py
from __future__ import absolute_import def check(header, convention=None, dimensions=[0, 1]): ix = dimensions[0] + 1 iy = dimensions[1] + 1 ctypex = header['CTYPE%i' % ix] crvaly = header['CRVAL%i' % iy] crpixy = header['CRPIX%i' % iy] cdelty = header['CDELT%i' % iy] # Check for CRVAL2!=0 for CAR projection if ctypex[4:] == '-CAR' and crvaly != 0: if convention in ['wells', 'calabretta']: if convention == 'wells': crpixy = crpixy - crvaly / cdelty header.update('CRPIX%i' % iy, crpixy) header.update('CRVAL%i' % iy, 0.0) else: pass else: raise Exception('''WARNING: projection is Plate Caree (-CAR) and CRVAL2 is not zero. This can be intepreted either according to Wells (1981) or Calabretta (2002). The former defines the projection as rectilinear regardless of the value of CRVAL2, whereas the latter defines the projection as rectilinear only when CRVAL2 is zero. You will need to specify the convention to assume by setting either convention='wells' or convention='calabretta' when initializing the FITSFigure instance. ''') return header
from __future__ import absolute_import def check(header, convention=None, dimensions=[0, 1]): ix = dimensions[0] + 1 iy = dimensions[1] + 1 ctypex = header['CTYPE%i' % ix] crvaly = header['CRVAL%i' % iy] # Check for CRVAL2!=0 for CAR projection if ctypex[4:] == '-CAR' and crvaly != 0: if convention in ['wells', 'calabretta']: if convention == 'wells': try: crpixy = header['CRPIX%i' % iy] cdelty = header['CDELT%i' % iy] except: raise Exception("Need CDELT to be present for wells convention") crpixy = crpixy - crvaly / cdelty header.update('CRPIX%i' % iy, crpixy) header.update('CRVAL%i' % iy, 0.0) else: pass else: raise Exception('''WARNING: projection is Plate Caree (-CAR) and CRVAL2 is not zero. This can be intepreted either according to Wells (1981) or Calabretta (2002). The former defines the projection as rectilinear regardless of the value of CRVAL2, whereas the latter defines the projection as rectilinear only when CRVAL2 is zero. You will need to specify the convention to assume by setting either convention='wells' or convention='calabretta' when initializing the FITSFigure instance. ''') return header
Fix check for Wells/Calabretta convention
Fix check for Wells/Calabretta convention
Python
mit
mwcraig/aplpy,allisony/aplpy
--- +++ @@ -8,14 +8,17 @@ ctypex = header['CTYPE%i' % ix] crvaly = header['CRVAL%i' % iy] - crpixy = header['CRPIX%i' % iy] - cdelty = header['CDELT%i' % iy] # Check for CRVAL2!=0 for CAR projection if ctypex[4:] == '-CAR' and crvaly != 0: if convention in ['wells', 'calabretta']: if convention == 'wells': + try: + crpixy = header['CRPIX%i' % iy] + cdelty = header['CDELT%i' % iy] + except: + raise Exception("Need CDELT to be present for wells convention") crpixy = crpixy - crvaly / cdelty header.update('CRPIX%i' % iy, crpixy) header.update('CRVAL%i' % iy, 0.0)
11abf077a8c429825c2ba55e42ffa590448da502
examples/django_app/tests/test_settings.py
examples/django_app/tests/test_settings.py
from django.test import TestCase
from django.conf import settings


class SettingsTestCase(TestCase):

    def test_modified_settings(self):
        with self.settings(CHATTERBOT={'name': 'Jim'}):
            self.assertIn('name', settings.CHATTERBOT)
            self.assertEqual('Jim', settings.CHATTERBOT['name'])

    def test_name_setting(self):
        from django.core.urlresolvers import reverse

        api_url = reverse('chatterbot:chatterbot')
        response = self.client.get(api_url)

        self.assertEqual(response.status_code, 405)
        self.assertIn('detail', response.json())
        self.assertIn('name', response.json())
        self.assertEqual('Django ChatterBot Example', response.json()['name'])
from django.test import TestCase
from django.conf import settings


class SettingsTestCase(TestCase):

    def test_modified_settings(self):
        with self.settings(CHATTERBOT={'name': 'Jim'}):
            self.assertIn('name', settings.CHATTERBOT)
            self.assertEqual('Jim', settings.CHATTERBOT['name'])

    def test_name_setting(self):
        from django.core.urlresolvers import reverse

        api_url = reverse('chatterbot:chatterbot')
        response = self.client.get(api_url)

        self.assertEqual(response.status_code, 405)
        self.assertIn('detail', response.content)
        self.assertIn('name', response.content)
        self.assertIn('Django ChatterBot Example', response.content)
Fix unit tests for Django 1.8
Fix unit tests for Django 1.8
Python
bsd-3-clause
vkosuri/ChatterBot,davizucon/ChatterBot,Reinaesaya/OUIRL-ChatBot,gunthercox/ChatterBot,Gustavo6046/ChatterBot,maclogan/VirtualPenPal,Reinaesaya/OUIRL-ChatBot
--- +++ @@ -16,6 +16,6 @@ response = self.client.get(api_url) self.assertEqual(response.status_code, 405) - self.assertIn('detail', response.json()) - self.assertIn('name', response.json()) - self.assertEqual('Django ChatterBot Example', response.json()['name']) + self.assertIn('detail', response.content) + self.assertIn('name', response.content) + self.assertIn('Django ChatterBot Example', response.content)
9e08eaea0259c702fea2055c4f72d66f8efe204d
fedmsg/schema.py
fedmsg/schema.py
AGENT = 'agent'    # Generic use. "Who is responsible for this event?"
FIELDS = 'fields'  # A list of fields that may be of interest. For instance,
                   # fas uses this to specify which fields were updated in
                   # a user.update event.

USER = 'user'    # FAS
GROUP = 'group'  # FAS

TAG = 'tag'        # For fedora-tagger
LOG = 'log'        # For fedmsg-logger
UPDATE = 'update'  # For bodhi

# Used only for testing and developing.
TEST = 'test'

# Build a list for use in validation
__k, __v = None, None
keys = [__v for __k, __v in globals().items() if not __k.startswith('__')]
__schema = dict(
    AGENT = 'agent',    # Generic use. "Who is responsible for this event?"
    FIELDS = 'fields',  # A list of fields that may be of interest. For instance,
                        # fas uses this to specify which fields were updated in
                        # a user.update event.

    USER = 'user',    # FAS
    GROUP = 'group',  # FAS

    TAG = 'tag',        # For fedora-tagger
    LOG = 'log',        # For fedmsg-logger
    UPDATE = 'update',  # For bodhi

    # Used only for testing and developing.
    TEST = 'test',
)

# Add these to the toplevel for backwards compat
for __i in __schema:
    vars()[__i] = __schema[__i]

# Build a set for use in validation
### TODO: Consider renaming this as it's not really the "keys" here
keys = frozenset(__schema.values())
Fix a bug on python-2.6 and use a frozenset()
Fix a bug on python-2.6 and use a frozenset() Signed-off-by: Ralph Bean <bcd66b84ebceb8404db9191d837c83f1b20bab8e@redhat.com>
Python
lgpl-2.1
chaiku/fedmsg,pombredanne/fedmsg,chaiku/fedmsg,chaiku/fedmsg,pombredanne/fedmsg,maxamillion/fedmsg,cicku/fedmsg,maxamillion/fedmsg,maxamillion/fedmsg,cicku/fedmsg,pombredanne/fedmsg,cicku/fedmsg,mathstuf/fedmsg,fedora-infra/fedmsg,vivekanand1101/fedmsg,fedora-infra/fedmsg,vivekanand1101/fedmsg,mathstuf/fedmsg,vivekanand1101/fedmsg,mathstuf/fedmsg,fedora-infra/fedmsg
--- +++ @@ -1,18 +1,24 @@ -AGENT = 'agent' # Generic use. "Who is responsible for this event?" -FIELDS = 'fields' # A list of fields that may be of interest. For instance, - # fas uses this to specify which fields were updated in - # a user.update event. +__schema = dict( + AGENT = 'agent', # Generic use. "Who is responsible for this event?" + FIELDS = 'fields', # A list of fields that may be of interest. For instance, + # fas uses this to specify which fields were updated in + # a user.update event. -USER = 'user' # FAS -GROUP = 'group' # FAS + USER = 'user', # FAS + GROUP = 'group', # FAS -TAG = 'tag' # For fedora-tagger -LOG = 'log' # For fedmsg-logger -UPDATE = 'update' # For bodhi + TAG = 'tag', # For fedora-tagger + LOG = 'log', # For fedmsg-logger + UPDATE = 'update', # For bodhi -# Used only for testing and developing. -TEST = 'test' + # Used only for testing and developing. + TEST = 'test', +) -# Build a list for use in validation -__k, __v = None, None -keys = [__v for __k, __v in globals().items() if not __k.startswith('__')] +# Add these to the toplevel for backwards compat +for __i in __schema: + vars()[__i] = __schema[__i] + +# Build a set for use in validation +### TODO: Consider renaming this as it's not really the "keys" here +keys = frozenset(__schema.values())
f9b3fd3d5e14c7ecfc59848e9edf6d99d5c6b98d
ffmpy/ffprobe.py
ffmpy/ffprobe.py
import json from ffmpy import FF class FFprobe(FF): """ Wrapper for `ffprobe <https://www.ffmpeg.org/ffprobe.html>`_. Utilizes ffmpeg `pipe protocol <https://www.ffmpeg.org/ffmpeg-protocols.html#pipe>`_. Input data (as a byte string) is passed to ffprobe on standard input. Result is presented in JSON format. """ def __init__(self, executable='ffprobe', global_options='', inputs=None): """Create an instance of FFprobe. :param str executable: absolute path to ffprobe executable :param list, str global_options: global options passed to ffmpeg executable :param dict inputs: a dictionary specifying one or more inputs as keys with their corresponding options as values """ super(FFprobe, self).__init__( executable=executable, global_options=global_options, inputs=inputs ) def run(self, input_data=None): """Run ffprobe command and return its output. If the command line contains `-print_format json` also parses the JSON output and deserializes it into a dictionary. :param str input_data: media (audio, video, transport stream) data as a byte string (e.g. the result of reading a file in binary mode) :return: dictionary describing the input media :rtype: dict """ output = super(FFprobe, self).run(input_data) if '-print_format json' in self.cmd_str: output = json.loads(output) # TODO: Convert all "numeric" strings to int/float return output
import json from ffmpy import FF class FFprobe(FF): """ Wrapper for `ffprobe <https://www.ffmpeg.org/ffprobe.html>`_. Utilizes ffmpeg `pipe protocol <https://www.ffmpeg.org/ffmpeg-protocols.html#pipe>`_. Input data (as a byte string) is passed to ffprobe on standard input. Result is presented in JSON format. """ def __init__(self, executable='ffprobe', global_options='', inputs=None): """Create an instance of FFprobe. :param str executable: absolute path to ffprobe executable :param list, str global_options: global options passed to ffmpeg executable :param dict inputs: a dictionary specifying one or more inputs as keys with their corresponding options as values """ super(FFprobe, self).__init__( executable=executable, global_options=global_options, inputs=inputs ) def run(self, input_data=None): """Run ffprobe command and return its output. If the command line contains `-print_format json` also parses the JSON output and deserializes it into a dictionary. :param str input_data: media (audio, video, transport stream) data as a byte string (e.g. the result of reading a file in binary mode) :return: dictionary describing the input media :rtype: dict """ output = super(FFprobe, self).run(input_data) if '-print_format json' in self.cmd_str: output = json.loads(output.decode()) # TODO: Convert all "numeric" strings to int/float return output
Fix json parsing for Python 3
Fix json parsing for Python 3
Python
mit
Ch00k/ffmpy,wchill/ffmpy3,astroza/ffmpy,astroza/ffmpy,Ch00k/ffmpy,wchill/ffmpy3
--- +++ @@ -35,7 +35,7 @@ """ output = super(FFprobe, self).run(input_data) if '-print_format json' in self.cmd_str: - output = json.loads(output) + output = json.loads(output.decode()) # TODO: Convert all "numeric" strings to int/float return output
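The fix recorded above comes down to decoding ffprobe's byte output before handing it to json.loads; a small self-contained sketch of the same idea, with an invented payload standing in for real ffprobe output:

import json

# Subprocess-style APIs return bytes on Python 3; json.loads() on
# Python 3.5 and earlier only accepts str, so decode first.
raw_output = b'{"format": {"duration": "12.04"}}'  # invented sample
parsed = json.loads(raw_output.decode())
print(parsed["format"]["duration"])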
9836c275d79851010654aacda379ccb78cea1b27
chartflo/engine.py
chartflo/engine.py
import pandas as pd from goerr import err from dataswim import DataSwim from django.db.models.query import QuerySet from django_pandas.io import read_frame class ChartFlo(DataSwim): def __repr__(self): """ String representation of the object """ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" def load_data(self, dataset): """ Set the main dataframe with the input data """ try: df = self._load_data(dataset) self.df = df except Exception as e: err.new(e, self.load_data, "Can not load dataset") def load_data_(self, dataset): """ Returns an instance with the input data """ try: df = self._load_data(dataset) return self.clone_(df) except Exception as e: err.new(e, self._load_data, "Can not load dataset") def _load_data(self, dataset): """ Convert the input data to pandas dataframe """ df = pd.DataFrame() try: if isinstance(dataset, pd.DataFrame): return dataset elif isinstance(dataset, QuerySet): df = read_frame(dataset) elif isinstance(dataset, dict): df = self._dict_to_df(dataset) elif isinstance(dataset, list): return pd.DataFrame(dataset) else: err.new(self._load_data, "Data format unknown: " + str(type(dataset)) + " please provide a dictionnary, a Django Queryset or a Pandas DataFrame") except Exception as e: err.new(e, self._load_data, "Can not convert dataset") if err.exists: err.throw() return df def _dict_to_df(self, dictobj): """ Converts a dictionary to a pandas dataframe """ x = [] y = [] print("DICT") for datapoint in dictobj: x.append(datapoint) y.append(dictobj[datapoint]) df = pd.DataFrame(dictobj) return df cf = ChartFlo()
from dataswim import DataSwim class ChartFlo(DataSwim): def __repr__(self): """ String representation of the object """ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" cf = ChartFlo()
Move the load_data method to the Dataswim lib
Move the load_data method to the Dataswim lib
Python
mit
synw/django-chartflo,synw/django-chartflo,synw/django-chartflo
--- +++ @@ -1,8 +1,4 @@ -import pandas as pd -from goerr import err from dataswim import DataSwim -from django.db.models.query import QuerySet -from django_pandas.io import read_frame class ChartFlo(DataSwim): @@ -14,63 +10,5 @@ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" - def load_data(self, dataset): - """ - Set the main dataframe with the input data - """ - try: - df = self._load_data(dataset) - self.df = df - except Exception as e: - err.new(e, self.load_data, "Can not load dataset") - - def load_data_(self, dataset): - """ - Returns an instance with the input data - """ - try: - df = self._load_data(dataset) - return self.clone_(df) - except Exception as e: - err.new(e, self._load_data, "Can not load dataset") - - def _load_data(self, dataset): - """ - Convert the input data to pandas dataframe - """ - df = pd.DataFrame() - try: - if isinstance(dataset, pd.DataFrame): - return dataset - elif isinstance(dataset, QuerySet): - df = read_frame(dataset) - elif isinstance(dataset, dict): - df = self._dict_to_df(dataset) - elif isinstance(dataset, list): - return pd.DataFrame(dataset) - else: - err.new(self._load_data, - "Data format unknown: " - + str(type(dataset)) + - " please provide a dictionnary, a Django Queryset or a Pandas DataFrame") - except Exception as e: - err.new(e, self._load_data, "Can not convert dataset") - if err.exists: - err.throw() - return df - - def _dict_to_df(self, dictobj): - """ - Converts a dictionary to a pandas dataframe - """ - x = [] - y = [] - print("DICT") - for datapoint in dictobj: - x.append(datapoint) - y.append(dictobj[datapoint]) - df = pd.DataFrame(dictobj) - return df - cf = ChartFlo()
b28dd26792be9125d2fd3d5657431bc6ee7a5470
lobster/cmssw/actions.py
lobster/cmssw/actions.py
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid)) self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
Add message in log with plotting process id.
Add message in log with plotting process id.
Python
mit
matz-e/lobster,matz-e/lobster,matz-e/lobster
--- +++ @@ -26,6 +26,7 @@ self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() + logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid)) self.__last = datetime.datetime.now()
6e76b51f5aa1c5ae54130f52e176195a992284aa
src/core/monkeypatch.py
src/core/monkeypatch.py
from django.conf import settings from django.core.urlresolvers import reverse as django_reverse from django.utils.encoding import iri_to_uri from core.middleware import GlobalRequestMiddleware def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None): """ This monkey patch will add the journal_code to reverse kwargs if the URL_CONFIG setting is set to 'patch' """ if not viewname.startswith('djdt'): local_request = GlobalRequestMiddleware.get_current_request() if settings.URL_CONFIG == 'path': code = local_request.journal.code if local_request.journal else 'press' if kwargs and not args: kwargs['journal_code'] = code else: kwargs = {'journal_code': code} # Drop kwargs if user is accessing admin site. if local_request.path.startswith('/admin/'): kwargs.pop('journal_code') # Drop kwargs if we have args (most likely from the template if args: kwargs = None args = [code] + args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # Ensure any unicode characters in the URL are escaped. return iri_to_uri(url)
from django.conf import settings from django.core.urlresolvers import reverse as django_reverse from django.utils.encoding import iri_to_uri from core.middleware import GlobalRequestMiddleware def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None): """ This monkey patch will add the journal_code to reverse kwargs if the URL_CONFIG setting is set to 'patch' """ if not viewname.startswith('djdt'): local_request = GlobalRequestMiddleware.get_current_request() if settings.URL_CONFIG == 'path': code = local_request.journal.code if local_request.journal else 'press' if kwargs and not args: kwargs['journal_code'] = code else: kwargs = {'journal_code': code} # Drop kwargs if user is accessing admin site. if local_request.path.startswith('/admin/'): kwargs.pop('journal_code') # Drop kwargs if we have args (most likely from the template if args: kwargs = None if settings.URL_CONFIG == 'path' and not local_request.path.startswith('/admin/'): args = tuple([code] + [x for x in args]) else: args = args url = django_reverse(viewname, urlconf, args, kwargs, current_app) # Ensure any unicode characters in the URL are escaped. return iri_to_uri(url)
Update for janeway's monkey patch.
Update for janeway's monkey patch.
Python
agpl-3.0
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
--- +++ @@ -27,7 +27,10 @@ # Drop kwargs if we have args (most likely from the template if args: kwargs = None - args = [code] + args + if settings.URL_CONFIG == 'path' and not local_request.path.startswith('/admin/'): + args = tuple([code] + [x for x in args]) + else: + args = args url = django_reverse(viewname, urlconf, args, kwargs, current_app)
986b15b5f33ebf25b26f40645378174bb66f1898
gerberlicious.py
gerberlicious.py
""" gerberlicious, a python library for programmatically generating Gerber files Example script. """ from gerberlicious.point import Point from gerberlicious.layer import Layer from gerberlicious.aperture import CircleAperture from gerberlicious.drawable import PointList, ApertureFlash from gerberlicious.render import GerberRenderer, SVGRenderer if __name__ == "__main__": layer = Layer() aperture1 = CircleAperture("10", 0.1) layer.add_aperture(aperture1) aperture2 = CircleAperture("11", 0.2, 0.1) layer.add_aperture(aperture2) square = PointList(aperture1) square.add_point(Point(2.5, 0)) square.add_point(Point(5, 0)) square.add_point(Point(5, 5)) square.add_point(Point(0, 5)) square.add_point(Point(0, 2.5)) square.add_point(Point(2.5, 0)) layer.add_shape(square) donut = ApertureFlash(aperture2, Point(0, 5)) layer.add_shape(donut) gr = GerberRenderer(layer) gr.write_file("out.grb") sr = SVGRenderer(layer) sr.write_file("out.svg")
""" gerberlicious, a python library for programmatically generating Gerber files Example script. """ from gerberlicious.point import Point from gerberlicious.layer import Layer from gerberlicious.aperture import CircleAperture from gerberlicious.drawable import PointList, ApertureFlash from gerberlicious.render import GerberRenderer, SVGRenderer if __name__ == "__main__": layer = Layer() aperture1 = CircleAperture("10", 0.1) layer.add_aperture(aperture1) aperture2 = CircleAperture("11", 0.2, 0.1) layer.add_aperture(aperture2) path = PointList(aperture1) path.add_point(Point(2.5, 0)) path.add_point(Point(5, 0)) path.add_point(Point(5, 5)) path.add_point(Point(0, 5)) path.add_point(Point(0, 2.5)) path.add_point(Point(2.5, 0)) layer.add_shape(path) donut = ApertureFlash(aperture2, Point(0, 5)) layer.add_shape(donut) gr = GerberRenderer(layer) gr.write_file("out.grb") sr = SVGRenderer(layer) sr.write_file("out.svg")
Rename 'square' to 'path' in example script
Rename 'square' to 'path' in example script
Python
mit
deveah/gerberlicious
--- +++ @@ -20,14 +20,14 @@ aperture2 = CircleAperture("11", 0.2, 0.1) layer.add_aperture(aperture2) - square = PointList(aperture1) - square.add_point(Point(2.5, 0)) - square.add_point(Point(5, 0)) - square.add_point(Point(5, 5)) - square.add_point(Point(0, 5)) - square.add_point(Point(0, 2.5)) - square.add_point(Point(2.5, 0)) - layer.add_shape(square) + path = PointList(aperture1) + path.add_point(Point(2.5, 0)) + path.add_point(Point(5, 0)) + path.add_point(Point(5, 5)) + path.add_point(Point(0, 5)) + path.add_point(Point(0, 2.5)) + path.add_point(Point(2.5, 0)) + layer.add_shape(path) donut = ApertureFlash(aperture2, Point(0, 5)) layer.add_shape(donut)
ed667175f4961bbc7cc823657a5dd80f35ed9593
organizer/migrations/0002_tag_data.py
organizer/migrations/0002_tag_data.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models TAGS = ( # ( tag name, tag slug ), ("augmented reality", "augmented-reality"), ("big data", "big-data"), ("django", "django"), ("education", "education"), ("ipython", "ipython"), ("javascript", "javascript"), ("jupyter", "jupyter"), ("mobile", "mobile"), ("node.js", "node-js"), ("php", "php"), ("python", "python"), ("ruby on rails", "ruby-on-rails"), ("ruby", "ruby"), ("video games", "video-games"), ("web", "web"), ("zend", "zend"), ) def add_tag_data(apps, schema_editor): Tag = apps.get_model('organizer', 'Tag') for tag_name, tag_slug in TAGS: Tag.objects.create( name=tag_name, slug=tag_slug) def remove_tag_data(apps, schema_editor): Tag = apps.get_model('organizer', 'Tag') for _, tag_slug in TAGS: tag = Tag.objects.get(slug=tag_slug) tag.delete() class Migration(migrations.Migration): dependencies = [ ('organizer', '0001_initial'), ] operations = [ migrations.RunPython( add_tag_data, remove_tag_data) ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models TAGS = ( # ( tag name, tag slug ), ("augmented reality", "augmented-reality"), ("big data", "big-data"), ("django", "django"), ("education", "education"), ("ipython", "ipython"), ("javascript", "javascript"), ("jupyter", "jupyter"), ("mobile", "mobile"), ("node.js", "node-js"), ("php", "php"), ("python", "python"), ("ruby on rails", "ruby-on-rails"), ("ruby", "ruby"), ("video games", "video-games"), ("web", "web"), ("zend", "zend"), ) def add_tag_data(apps, schema_editor): Tag = apps.get_model('organizer', 'Tag') tag_list = [] for tag_name, tag_slug in TAGS: tag_list.append( Tag(name=tag_name, slug=tag_slug)) Tag.objects.bulk_create(tag_list) def remove_tag_data(apps, schema_editor): Tag = apps.get_model('organizer', 'Tag') for _, tag_slug in TAGS: tag = Tag.objects.get(slug=tag_slug) tag.delete() class Migration(migrations.Migration): dependencies = [ ('organizer', '0001_initial'), ] operations = [ migrations.RunPython( add_tag_data, remove_tag_data) ]
Optimize addition of Tag data in migration.
Ch26: Optimize addition of Tag data in migration.
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
--- +++ @@ -26,10 +26,11 @@ def add_tag_data(apps, schema_editor): Tag = apps.get_model('organizer', 'Tag') + tag_list = [] for tag_name, tag_slug in TAGS: - Tag.objects.create( - name=tag_name, - slug=tag_slug) + tag_list.append( + Tag(name=tag_name, slug=tag_slug)) + Tag.objects.bulk_create(tag_list) def remove_tag_data(apps, schema_editor):
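The migration change recorded above trades one INSERT per tag for a single batched INSERT via bulk_create; a generic sketch of the same pattern, with a hypothetical Tag model and import path that are not taken from the record:

from myapp.models import Tag  # hypothetical app and model

rows = [("python", "python"), ("django", "django")]

# One query per row:
for name, slug in rows:
    Tag.objects.create(name=name, slug=slug)

# One batched query for all rows:
Tag.objects.bulk_create([Tag(name=name, slug=slug) for name, slug in rows])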
cb17da3fbd819a386446bc2af42e38f8c95bc392
blango/forms.py
blango/forms.py
from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.contrib.auth.models import User try: from django import newforms as forms except ImportError: from django import forms from blango.models import Comment # This violates the DRY principe, but it's the only # way I found for editing staff comments from # the Django admin application class CommentForm(forms.ModelForm): author = forms.CharField(label=_('Name'), max_length=16) author_uri = forms.CharField(label=_('Website'), max_length=256, required=False) author_email = forms.EmailField(label=_('Email'), help_text=mark_safe('<span class="small">%s</span>' % _('(Won\'t be published)'))) class Meta: model = Comment fields = ('author', 'author_uri', 'author_email', 'body') def save(self, entry): self.instance.entry = entry super(CommentForm, self).save() def clean_author(self): author = self.cleaned_data['author'] try: User.objects.get(username=author) raise forms.ValidationError(_('This username belongs to a registered user')) except User.DoesNotExist: return author class UserCommentForm(forms.ModelForm): class Meta: model = Comment fields = ('body', ) def save(self, entry, request): self.instance.user = request.user super(UserCommentForm, self).save(entry)
from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.contrib.auth.models import User try: from django import newforms as forms except ImportError: from django import forms from blango.models import Comment # This violates the DRY principe, but it's the only # way I found for editing staff comments from # the Django admin application class CommentForm(forms.ModelForm): author = forms.CharField(label=_('Name'), max_length=16) author_uri = forms.CharField(label=_('Website'), max_length=256, required=False) author_email = forms.EmailField(label=_('Email'), help_text=mark_safe('<span class="small">%s</span>' % _('(Won\'t be published)'))) class Meta: model = Comment fields = ('author', 'author_uri', 'author_email', 'body') def save(self, entry): self.instance.entry = entry super(CommentForm, self).save() def clean_author(self): author = self.cleaned_data['author'] try: User.objects.get(username=author) raise forms.ValidationError(_('This username belongs to a registered user')) except User.DoesNotExist: return author class UserCommentForm(forms.ModelForm): class Meta: model = Comment fields = ('body', ) def save(self, entry, request): self.instance.user = request.user self.instance.entry = entry super(UserCommentForm, self).save(entry)
Fix UserCommentForm(), which got broken in the previous commit.
Fix UserCommentForm(), which got broken in the previous commit.
Python
bsd-3-clause
fiam/blangoblog,fiam/blangoblog,fiam/blangoblog
--- +++ @@ -41,5 +41,6 @@ def save(self, entry, request): self.instance.user = request.user + self.instance.entry = entry super(UserCommentForm, self).save(entry)
b0311af3895b359ce3a1ea1fad953c0b41585ce8
app/cache_handler.py
app/cache_handler.py
"""This module contains a cache handler.""" __author__ = 'Aaron Steele' # MOL imports import cache # Standard Python imports import json import logging import urllib import webapp2 # Google App Engine imports from google.appengine.api import urlfetch from google.appengine.ext.webapp.util import run_wsgi_app class GetHandler(webapp2.RequestHandler): """Request handler for cache requests.""" def post(self): """Returns a cached value by key or None if it doesn't exist.""" key = self.request.get('key', 'empty') sql = self.request.get('sql', None) cache_buster = self.request.get('cache_buster', None) if not cache_buster: value = cache.get(key) if not value and sql: url = 'http://mol.cartodb.com/api/v2/sql?%s' % urllib.urlencode(dict(q=sql)) value = urlfetch.fetch(url, deadline=60).content if not json.loads(value).has_key('error') and not cache_buster: cache.add(key, value) self.response.headers["Content-Type"] = "application/json" self.response.out.write(value) application = webapp2.WSGIApplication( [('/cache/get', GetHandler),], debug=True) def main(): run_wsgi_app(application) if __name__ == "__main__": main()
"""This module contains a cache handler.""" __author__ = 'Aaron Steele' # MOL imports import cache # Standard Python imports import json import logging import urllib import webapp2 # Google App Engine imports from google.appengine.api import urlfetch from google.appengine.ext.webapp.util import run_wsgi_app class GetHandler(webapp2.RequestHandler): """Request handler for cache requests.""" def post(self): """Returns a cached value by key or None if it doesn't exist.""" key = self.request.get('key', 'empty') logging.info('SEARCH_KEY=%s' % key) sql = self.request.get('sql', None) cache_buster = self.request.get('cache_buster', None) if not cache_buster: value = cache.get(key) if not value and sql: url = 'http://mol.cartodb.com/api/v2/sql?%s' % urllib.urlencode(dict(q=sql)) value = urlfetch.fetch(url, deadline=60).content if not json.loads(value).has_key('error') and not cache_buster: cache.add(key, value) self.response.headers["Content-Type"] = "application/json" self.response.out.write(value) application = webapp2.WSGIApplication( [('/cache/get', GetHandler),], debug=True) def main(): run_wsgi_app(application) if __name__ == "__main__": main()
Add logging of search key to cache handler.
Add logging of search key to cache handler.
Python
bsd-3-clause
MapofLife/MOL,MapofLife/MOL,MapofLife/MOL,MapofLife/MOL,MapofLife/MOL,MapofLife/MOL,MapofLife/MOL,MapofLife/MOL
--- +++ @@ -21,6 +21,7 @@ def post(self): """Returns a cached value by key or None if it doesn't exist.""" key = self.request.get('key', 'empty') + logging.info('SEARCH_KEY=%s' % key) sql = self.request.get('sql', None) cache_buster = self.request.get('cache_buster', None) if not cache_buster:
c78d9c63238b5535b1881f4eee54700f5a138b04
lupa/__init__.py
lupa/__init__.py
# We need to enable global symbol visibility for lupa in order to # support binary module loading in Lua. If we can enable it here, we # do it temporarily. def _try_import_with_global_library_symbols(): try: import DLFCN dlopen_flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL except ImportError: import ctypes dlopen_flags = ctypes.RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
from __future__ import absolute_import # We need to enable global symbol visibility for lupa in order to # support binary module loading in Lua. If we can enable it here, we # do it temporarily. def _try_import_with_global_library_symbols(): try: from os import RTLD_NOW, RTLD_GLOBAL except ImportError: from DLFCN import RTLD_NOW, RTLD_GLOBAL # Py2.7 dlopen_flags = RTLD_NOW | RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
Use "os.RTLD_*" flags in Py3, as "DLFCN.*" is only the right thing to use in Py2.
Use "os.RTLD_*" flags in Py3, as "DLFCN.*" is only the right thing to use in Py2.
Python
mit
pombredanne/lupa,pombredanne/lupa
--- +++ @@ -1,3 +1,5 @@ +from __future__ import absolute_import + # We need to enable global symbol visibility for lupa in order to # support binary module loading in Lua. If we can enable it here, we @@ -5,11 +7,10 @@ def _try_import_with_global_library_symbols(): try: - import DLFCN - dlopen_flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL + from os import RTLD_NOW, RTLD_GLOBAL except ImportError: - import ctypes - dlopen_flags = ctypes.RTLD_GLOBAL + from DLFCN import RTLD_NOW, RTLD_GLOBAL # Py2.7 + dlopen_flags = RTLD_NOW | RTLD_GLOBAL import sys old_flags = sys.getdlopenflags()
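The lupa change recorded above reads the RTLD constants from os instead of the Python 2-only DLFCN module; a minimal, Unix-only sketch of the surrounding dlopen-flag dance, using the standard-library math module as a stand-in for the real binary module:

import os
import sys

# Temporarily widen dlopen() flags so the extension exports its symbols
# globally, then restore whatever was set before (Unix-only APIs).
old_flags = sys.getdlopenflags()
try:
    sys.setdlopenflags(os.RTLD_NOW | os.RTLD_GLOBAL)
    import math  # stand-in for the extension being loaded
finally:
    sys.setdlopenflags(old_flags)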
fc2c3ec6680e8c1102a336ec0f6bde7db32d2070
tests/tools/assigner/test_arguments.py
tests/tools/assigner/test_arguments.py
import inspect import sys import unittest from contextlib import contextmanager import kafka.tools.assigner.actions from kafka.tools.assigner.arguments import set_up_arguments from kafka.tools.assigner.modules import get_modules from kafka.tools.assigner.plugins import PluginModule @contextmanager def redirect_err_output(): current_err = sys.stderr try: sys.stderr = sys.stdout yield finally: sys.stderr = current_err class ArgumentTests(unittest.TestCase): def setUp(self): self.null_plugin = PluginModule() def create_action_map(self): self.action_map = dict((cls.name, cls) for cls in get_modules(kafka.tools.assigner.actions, kafka.tools.assigner.actions.ActionModule)) def test_get_arguments_none(self): sys.argv = ['kafka-assigner'] with capture_sys_output() as (stdout, stderr): self.assertRaises(SystemExit, set_up_arguments, {}, {}, [self.null_plugin]) def test_get_modules(self): self.create_action_map() assert 'elect' in self.action_map assert inspect.isclass(self.action_map['elect']) def test_get_arguments_minimum(self): self.create_action_map() sys.argv = ['kafka-assigner', '--zookeeper', 'zkhost1.example.com:2181', 'elect'] args = set_up_arguments(self.action_map, {}, [self.null_plugin]) assert args.action == 'elect'
import inspect import sys import unittest from contextlib import contextmanager import kafka.tools.assigner.actions from kafka.tools.assigner.arguments import set_up_arguments from kafka.tools.assigner.modules import get_modules from kafka.tools.assigner.plugins import PluginModule @contextmanager def redirect_err_output(): current_err = sys.stderr try: sys.stderr = sys.stdout yield finally: sys.stderr = current_err class ArgumentTests(unittest.TestCase): def setUp(self): self.null_plugin = PluginModule() def create_action_map(self): self.action_map = dict((cls.name, cls) for cls in get_modules(kafka.tools.assigner.actions, kafka.tools.assigner.actions.ActionModule)) def test_get_arguments_none(self): sys.argv = ['kafka-assigner'] with redirect_err_output(): self.assertRaises(SystemExit, set_up_arguments, {}, {}, [self.null_plugin]) def test_get_modules(self): self.create_action_map() assert 'elect' in self.action_map assert inspect.isclass(self.action_map['elect']) def test_get_arguments_minimum(self): self.create_action_map() sys.argv = ['kafka-assigner', '--zookeeper', 'zkhost1.example.com:2181', 'elect'] args = set_up_arguments(self.action_map, {}, [self.null_plugin]) assert args.action == 'elect'
Call the CM to mask stderr output properly
Call the CM to mask stderr output properly
Python
apache-2.0
toddpalino/kafka-tools
--- +++ @@ -30,7 +30,7 @@ def test_get_arguments_none(self): sys.argv = ['kafka-assigner'] - with capture_sys_output() as (stdout, stderr): + with redirect_err_output(): self.assertRaises(SystemExit, set_up_arguments, {}, {}, [self.null_plugin]) def test_get_modules(self):
1cc044e601dc6b6d2a5f62c7557a6cd2d5b50986
homepage/views.py
homepage/views.py
from datetime import datetime, timedelta
from django.shortcuts import render_to_response
from django.template import RequestContext
import api


def index(request):
    uid = request.COOKIES.get("uid")
    if not uid:
        uid, data = api.create_new_user()
    else:
        data = api.get_saved_cities(uid)

    response = render_to_response(
        "homepage/index.html",
        {"saved_cities": data},
        context_instance=RequestContext(request)
    )

    expires = datetime.utcnow() + timedelta(days=180)
    response.set_cookie("uid", uid, expires=expires)
    return response
from datetime import datetime, timedelta
from django.shortcuts import render_to_response
from django.template import RequestContext
import api


def index(request):
    uid = request.COOKIES.get("uid")
    data = None
    if not uid:
        uid, _ = api.create_new_user()
    else:
        data = api.get_saved_cities(uid)

    response = render_to_response(
        "homepage/index.html",
        {"saved_cities": data},
        context_instance=RequestContext(request)
    )

    expires = datetime.utcnow() + timedelta(days=180)
    response.set_cookie("uid", uid, expires=expires)
    return response
Fix create user in homepage view.
Fix create user in homepage view. api.create_new_user is returning the newly created user record, so we can't pass that as data to the template.
Python
mit
c17r/tsace,c17r/tsace,c17r/tsace
--- +++ @@ -6,8 +6,9 @@ def index(request): uid = request.COOKIES.get("uid") + data = None if not uid: - uid, data = api.create_new_user() + uid, _ = api.create_new_user() else: data = api.get_saved_cities(uid)
c755934a9bc9f15f1e7dcf6d337c3dd3acf4e824
checks/check_solarize.py
checks/check_solarize.py
import imgaug as ia
import imgaug.augmenters as iaa


def main():
    image = ia.quokka_square((128, 128))
    images_aug = iaa.Solarize(1.0)(images=[image] * (5*5))
    ia.imshow(ia.draw_grid(images_aug))


if __name__ == "__main__":
    main()
from __future__ import print_function, division, absolute_import
import imgaug as ia
import imgaug.augmenters as iaa
import timeit


def main():
    for size in [64, 128, 256, 512, 1024]:
        for threshold in [64, 128, 192]:
            time_iaa = timeit.timeit(
                "iaa.solarize(image, %d)" % (threshold,),
                number=1000,
                setup=(
                    "import imgaug as ia; "
                    "import imgaug.augmenters as iaa; "
                    "image = ia.quokka_square((%d, %d))" % (size, size))
            )
            time_pil = timeit.timeit(
                "np.asarray("
                "PIL.ImageOps.solarize(PIL.Image.fromarray(image), %d)"
                ")" % (threshold,),
                number=1000,
                setup=(
                    "import numpy as np; "
                    "import PIL.Image; "
                    "import PIL.ImageOps; "
                    "import imgaug as ia; "
                    "image = ia.quokka_square((%d, %d))" % (size, size))
            )
            print("[size=%04d, thresh=%03d] iaa=%.4f pil=%.4f" % (
                size, threshold, time_iaa, time_pil))

    image = ia.quokka_square((128, 128))
    images_aug = iaa.Solarize(1.0)(images=[image] * (5*5))
    ia.imshow(ia.draw_grid(images_aug))


if __name__ == "__main__":
    main()
Add performance comparison with PIL
Add performance comparison with PIL
Python
mit
aleju/ImageAugmenter,aleju/imgaug,aleju/imgaug
--- +++ @@ -1,8 +1,35 @@ +from __future__ import print_function, division, absolute_import import imgaug as ia import imgaug.augmenters as iaa +import timeit def main(): + for size in [64, 128, 256, 512, 1024]: + for threshold in [64, 128, 192]: + time_iaa = timeit.timeit( + "iaa.solarize(image, %d)" % (threshold,), + number=1000, + setup=( + "import imgaug as ia; " + "import imgaug.augmenters as iaa; " + "image = ia.quokka_square((%d, %d))" % (size, size)) + ) + time_pil = timeit.timeit( + "np.asarray(" + "PIL.ImageOps.solarize(PIL.Image.fromarray(image), %d)" + ")" % (threshold,), + number=1000, + setup=( + "import numpy as np; " + "import PIL.Image; " + "import PIL.ImageOps; " + "import imgaug as ia; " + "image = ia.quokka_square((%d, %d))" % (size, size)) + ) + print("[size=%04d, thresh=%03d] iaa=%.4f pil=%.4f" % ( + size, threshold, time_iaa, time_pil)) + image = ia.quokka_square((128, 128)) images_aug = iaa.Solarize(1.0)(images=[image] * (5*5)) ia.imshow(ia.draw_grid(images_aug))
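The new check script recorded above relies on timeit.timeit with a setup string; a tiny standalone example of that API, with an invented statement and data set:

import timeit

# The statement runs `number` times; everything it needs is built once in `setup`.
elapsed = timeit.timeit(
    "sorted(data)",
    setup="data = list(range(1000, 0, -1))",
    number=1000,
)
print("%.4f s for 1000 runs" % elapsed)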
3068b3d55082947fcd0594945a705c3cc34659ca
tests/test_python_solutions.py
tests/test_python_solutions.py
import glob
import json
import os

import pytest

from helpers import solutions_dir


# NOTE: If we make solution_files a fixture instead of a normal attr/function,
# then we can't use it in pytest's parametrize
solution_files = glob.glob(os.path.join(solutions_dir("python"), "*.py"))


@pytest.mark.python
def test_solutions_exist():
    assert solution_files


def id_func(param):
    problem_name, ext = os.path.splitext(os.path.basename(param))
    return problem_name


@pytest.mark.python
@pytest.mark.parametrize("solution_file", solution_files, ids=id_func)
def test_submit_file(solution_file, submit_solution):
    result = submit_solution(solution_file)
    assert result.get("success") is True, "Failed. Engine output:\n{:}".format(
        json.dumps(result, indent=4)
    )
import glob
import json
import os

import pytest

from helpers import solutions_dir


# NOTE: If we make solution_files a fixture instead of a normal attr/function,
# then we can't use it in pytest's parametrize
solution_files = glob.glob(os.path.join(solutions_dir("python"), "*.py"))


@pytest.mark.python
def test_python_solutions_exist():
    assert solution_files


def id_func(param):
    problem_name, ext = os.path.splitext(os.path.basename(param))
    return problem_name


@pytest.mark.python
@pytest.mark.parametrize("solution_file", solution_files, ids=id_func)
def test_submit_file(solution_file, submit_solution):
    result = submit_solution(solution_file)
    assert result.get("success") is True, "Failed. Engine output:\n{:}".format(
        json.dumps(result, indent=4)
    )
Rename functions in python test
Rename functions in python test
Python
mit
project-lovelace/lovelace-engine,project-lovelace/lovelace-engine,project-lovelace/lovelace-engine
--- +++ @@ -13,7 +13,7 @@ @pytest.mark.python -def test_solutions_exist(): +def test_python_solutions_exist(): assert solution_files
418be2bfa0f902183b607fa402da75c09bf7e6db
hug/decorators.py
hug/decorators.py
from functools import wraps from collections import OrderedDict import sys def call(url, methods=('ALL', )): def decorator(api_function): module = sys.modules[api_function.__name__] api_definition = sys.modules['hug.hug'].__dict__.setdefault('API_CALLS', OrderedDict()) for method in methods: api_definition.setdefault(url, {})['ALL'] = api_function def interface(request, reponse): return api_function(**request.attributes) api_function.interface = interface return api_function def get(url): return call(url=url, accept=('GET', )) def post(url): return call(url=url, accept=('POST', )) def put(url): return call(url=url, acccept('PUT', )) def delete(url): return call(url=url, accept=('DELETE', ))
from functools import wraps from collections import OrderedDict import sys from falcon import HTTP_METHODS def call(url, methods=HTTP_METHODS): def decorator(api_function): module = sys.modules[api_function.__name__] api_definition = sys.modules['hug.hug'].__dict__.setdefault('HUG_API_CALLS', OrderedDict()) for method in methods: api_definition.setdefault(url, {})["on_{0}".format(method.lower())] = api_function def interface(request, reponse): return api_function(**request.attributes) api_function.interface = interface return api_function def get(url): return call(url=url, accept=('GET', )) def post(url): return call(url=url, accept=('POST', )) def put(url): return call(url=url, acccept('PUT', )) def delete(url): return call(url=url, accept=('DELETE', ))
Use falcon methods as HTTP methods
Use falcon methods as HTTP methods
Python
mit
STANAPO/hug,shaunstanislaus/hug,shaunstanislaus/hug,timothycrosley/hug,philiptzou/hug,janusnic/hug,timothycrosley/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,jean/hug,giserh/hug,origingod/hug,janusnic/hug,yasoob/hug,gbn972/hug,MuhammadAlkarouri/hug,jean/hug,giserh/hug,timothycrosley/hug,gbn972/hug,alisaifee/hug,yasoob/hug,philiptzou/hug,alisaifee/hug,STANAPO/hug,origingod/hug
--- +++ @@ -2,13 +2,15 @@ from collections import OrderedDict import sys +from falcon import HTTP_METHODS -def call(url, methods=('ALL', )): + +def call(url, methods=HTTP_METHODS): def decorator(api_function): module = sys.modules[api_function.__name__] - api_definition = sys.modules['hug.hug'].__dict__.setdefault('API_CALLS', OrderedDict()) + api_definition = sys.modules['hug.hug'].__dict__.setdefault('HUG_API_CALLS', OrderedDict()) for method in methods: - api_definition.setdefault(url, {})['ALL'] = api_function + api_definition.setdefault(url, {})["on_{0}".format(method.lower())] = api_function def interface(request, reponse): return api_function(**request.attributes) @@ -16,6 +18,7 @@ api_function.interface = interface return api_function + def get(url): return call(url=url, accept=('GET', ))
edcf561564a8fe30c80bda750ec0770c5e854ce8
Code/Python/Kamaelia/Examples/Backplane/Forwarding.py
Code/Python/Kamaelia/Examples/Backplane/Forwarding.py
#!/usr/bin/python import time import Axon from Kamaelia.Util.Backplane import * from Kamaelia.Util.Console import * from Kamaelia.Chassis.Pipeline import Pipeline class Source(Axon.ThreadedComponent.threadedcomponent): value = 1 sleep = 1 def main(self): while 1: self.send(str(self.value), "outbox") time.sleep(self.sleep) Backplane("broadcast").activate() Pipeline( Source(), SubscribeTo("broadcast"), ConsoleEchoer(), ).activate() Pipeline( ConsoleReader(), PublishTo("broadcast", forwarder=True), ConsoleEchoer(), ).run()
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1) # # (1) Kamaelia Contributors are listed in the AUTHORS file and at # http://www.kamaelia.org/AUTHORS - please extend this file, # not this notice. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time import Axon from Kamaelia.Util.Backplane import * from Kamaelia.Util.Console import * from Kamaelia.Chassis.Pipeline import Pipeline class Source(Axon.ThreadedComponent.threadedcomponent): value = 1 sleep = 1 def main(self): while 1: self.send(str(self.value), "outbox") time.sleep(self.sleep) Backplane("broadcast").activate() Pipeline( Source(), SubscribeTo("broadcast"), ConsoleEchoer(), ).activate() Pipeline( ConsoleReader(), PublishTo("broadcast", forwarder=True), ConsoleEchoer(), ).run()
Change license to Apache 2
Change license to Apache 2
Python
apache-2.0
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
--- +++ @@ -1,4 +1,23 @@ #!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1) +# +# (1) Kamaelia Contributors are listed in the AUTHORS file and at +# http://www.kamaelia.org/AUTHORS - please extend this file, +# not this notice. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import time import Axon
6610648c75dc90800655a3502d4cd24fb47ac406
timpani/webserver/webserver.py
timpani/webserver/webserver.py
import flask
import os.path
import datetime
import urllib.parse

from .. import database
from .. import configmanager
from . import controllers

FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
STATIC_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../static"))
CONFIG_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../configs/"))
configs = configmanager.ConfigManager(configPath = CONFIG_PATH)
authConfig = configs["auth"]

app = flask.Flask(__name__, static_folder = STATIC_PATH)
app.secret_key = authConfig["signing_key"]
app.register_blueprint(controllers.user.blueprint)
app.register_blueprint(controllers.admin.blueprint)

@app.teardown_request
def teardown_request(exception = None):
    print(flask.session)
    databaseConnection = database.ConnectionManager.getMainConnection()
    databaseConnection.session.close()
import flask
import os.path
import datetime
import urllib.parse

from .. import database
from .. import configmanager
from . import controllers

FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
STATIC_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../static"))
CONFIG_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../configs/"))
configs = configmanager.ConfigManager(configPath = CONFIG_PATH)
authConfig = configs["auth"]

app = flask.Flask(__name__, static_folder = STATIC_PATH)
app.secret_key = authConfig["signing_key"]
app.register_blueprint(controllers.user.blueprint)
app.register_blueprint(controllers.admin.blueprint)

@app.teardown_request
def teardown_request(exception = None):
    databaseConnection = database.ConnectionManager.getMainConnection()
    databaseConnection.session.close()
Remove session cookie print in teardown
Remove session cookie print in teardown
Python
mit
ollien/Timpani,ollien/Timpani,ollien/Timpani
--- +++ @@ -20,6 +20,5 @@ @app.teardown_request def teardown_request(exception = None): - print(flask.session) databaseConnection = database.ConnectionManager.getMainConnection() databaseConnection.session.close()
9793107fb218bdff796d8df55404156e299e33ea
website/apps/ts_om/check.py
website/apps/ts_om/check.py
import os

from django.conf import settings

__author__ = 'nreed'

url_dict = {
    'validate': 'http://127.0.0.1:8000/om_validate/validate/',
    'scenarios': '/home/nreed/scenarios/',
    'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}


def check_dir(local_dir, typ):
    if local_dir is None or local_dir == '':
        return url_dict[typ]
    if os.name == "nt":
        if not local_dir.endswith('\\'):
            local_dir += '\\'
    else:
        if not local_dir.endswith('/'):
            local_dir += '/'
    return local_dir


def check_url(url, typ):
    if url is None or url == '':
        return url_dict[typ]
    if not url.endswith('/'):
        url += '/'
    return url
import os

from django.conf import settings

__author__ = 'nreed'

url_dict = {
    'validate': 'http://127.0.0.1:8000/om_validate/validate/',
    'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/',
    'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}


def check_dir(local_dir, typ):
    if local_dir is None or local_dir == '':
        return url_dict[typ]
    if os.name == "nt":
        if not local_dir.endswith('\\'):
            local_dir += '\\'
    else:
        if not local_dir.endswith('/'):
            local_dir += '/'
    return local_dir


def check_url(url, typ):
    if url is None or url == '':
        return url_dict[typ]
    if not url.endswith('/'):
        url += '/'
    return url
Set default scenarios directory to within root of project.
Set default scenarios directory to within root of project.
Python
mpl-2.0
vecnet/om,vecnet/om,vecnet/om,vecnet/om,vecnet/om
--- +++ @@ -6,7 +6,7 @@ url_dict = { 'validate': 'http://127.0.0.1:8000/om_validate/validate/', - 'scenarios': '/home/nreed/scenarios/', + 'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/', 'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/' }
b04a01f451b2fe0348af217e9eed905b552cf1cf
lc0201_bitwise_and_of_numbers_range.py
lc0201_bitwise_and_of_numbers_range.py
"""Leetcode 201. Bitwise AND of Numbers Range Medium URL: https://leetcode.com/problems/bitwise-and-of-numbers-range/ Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive. Example 1: Input: [5,7] Output: 4 Example 2: Input: [0,1] Output: 0 """ class Solution(object): def rangeBitwiseAnd(self, m, n): """ :type m: int :type n: int :rtype: int """ if m == 0: return 0 result = m for i in range(m + 1, n + 1): result &= i return result def main(): # Output: 4 m, n = 5, 7 print Solution().rangeBitwiseAnd(m, n) # Output: 0 m, n = 0, 1 print Solution().rangeBitwiseAnd(m, n) if __name__ == '__main__': main()
"""Leetcode 201. Bitwise AND of Numbers Range Medium URL: https://leetcode.com/problems/bitwise-and-of-numbers-range/ Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive. Example 1: Input: [5,7] Output: 4 Example 2: Input: [0,1] Output: 0 """ class SolutionBruteForce(object): def rangeBitwiseAnd(self, m, n): """ :type m: int :type n: int :rtype: int Time limit exceeded. Time complexity: O(n-m). Space complexity: O(1). """ if m == 0: return 0 result = m for i in range(m + 1, n + 1): result &= i return result def main(): # Output: 4 m, n = 5, 7 print SolutionBruteForce().rangeBitwiseAnd(m, n) # Output: 0 m, n = 0, 1 print SolutionBruteForce().rangeBitwiseAnd(m, n) if __name__ == '__main__': main()
Revise class name and add time/space complexity
Revise class name and add time/space complexity
Python
bsd-2-clause
bowen0701/algorithms_data_structures
--- +++ @@ -16,12 +16,17 @@ """ -class Solution(object): +class SolutionBruteForce(object): def rangeBitwiseAnd(self, m, n): """ :type m: int :type n: int :rtype: int + + Time limit exceeded. + + Time complexity: O(n-m). + Space complexity: O(1). """ if m == 0: return 0 @@ -35,11 +40,11 @@ def main(): # Output: 4 m, n = 5, 7 - print Solution().rangeBitwiseAnd(m, n) + print SolutionBruteForce().rangeBitwiseAnd(m, n) # Output: 0 m, n = 0, 1 - print Solution().rangeBitwiseAnd(m, n) + print SolutionBruteForce().rangeBitwiseAnd(m, n) if __name__ == '__main__':
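The record above only renames the brute-force class and notes in its docstring that it exceeds the time limit; for contrast, a common alternative (not part of this record) keeps just the shared high bits of m and n, which is all that survives ANDing the whole range:

def range_bitwise_and(m, n):
    """Shift both bounds right until they agree, then restore the shift."""
    shift = 0
    while m < n:
        m >>= 1
        n >>= 1
        shift += 1
    return m << shift

assert range_bitwise_and(5, 7) == 4
assert range_bitwise_and(0, 1) == 0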
d10344dce7d012de2d434cd205fb0f179e34113c
packages/syft/src/syft/core/tensor/types.py
packages/syft/src/syft/core/tensor/types.py
# relative
from .passthrough import AcceptableSimpleType  # type: ignore
from .passthrough import PassthroughTensor  # type: ignore
from .passthrough import SupportedChainType  # type: ignore
from .passthrough import AcceptableSimpleType  # type: ignore # NOQA
from .passthrough import PassthroughTensor  # type: ignore # NOQA
from .passthrough import SupportedChainType  # type: ignore # NOQA
Fix flake8 warning by adding flake annotation
Fix flake8 warning by adding flake annotation
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
--- +++ @@ -1,4 +1,3 @@ -# relative -from .passthrough import AcceptableSimpleType # type: ignore -from .passthrough import PassthroughTensor # type: ignore -from .passthrough import SupportedChainType # type: ignore +from .passthrough import AcceptableSimpleType # type: ignore # NOQA +from .passthrough import PassthroughTensor # type: ignore # NOQA +from .passthrough import SupportedChainType # type: ignore # NOQA
c576acc020e60e704dad55f8cd281c4ebb26ad28
plenario/apiary/views.py
plenario/apiary/views.py
from flask import Blueprint, request
from json import dumps, loads
from redis import Redis

from plenario.settings import REDIS_HOST_SAFE


blueprint = Blueprint("apiary", __name__)
redis = Redis(REDIS_HOST_SAFE)


@blueprint.route("/apiary/send_message", methods=["POST"])
def send_message():
    try:
        data = loads(request.data)
        redis.set(name="AOTMapper_" + data["name"], value=dumps(data["value"]))
    except (KeyError, ValueError):
        pass
from collections import defaultdict
from json import dumps, loads
from traceback import format_exc

from flask import Blueprint, make_response, request
from redis import Redis

from plenario.auth import login_required
from plenario.settings import REDIS_HOST_SAFE

blueprint = Blueprint("apiary", __name__)
redis = Redis(REDIS_HOST_SAFE)


# @login_required
@blueprint.route("/apiary/send_message", methods=["POST"])
def send_message():
    try:
        data = loads(request.data)
        redis.set(name="AOTMapper_" + data["name"], value=dumps(data["value"]))
        return make_response("Message received successfully!", 200)
    except (KeyError, ValueError):
        return make_response(format_exc(), 500)


@login_required
@blueprint.route("/apiary/mapper_errors", methods=["GET"])
def mapper_errors():
    errors = defaultdict(list)
    for key in redis.scan_iter(match="AOTMapper_*"):
        errors[key].append(redis.get(key))
    return make_response(dumps(errors), 200)
Add a rudimentary view for tracking mapper errors
Add a rudimentary view for tracking mapper errors
Python
mit
UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario
--- +++ @@ -1,18 +1,32 @@ -from flask import Blueprint, request +from collections import defaultdict from json import dumps, loads +from traceback import format_exc + +from flask import Blueprint, make_response, request from redis import Redis +from plenario.auth import login_required from plenario.settings import REDIS_HOST_SAFE - blueprint = Blueprint("apiary", __name__) redis = Redis(REDIS_HOST_SAFE) +# @login_required @blueprint.route("/apiary/send_message", methods=["POST"]) def send_message(): try: data = loads(request.data) redis.set(name="AOTMapper_" + data["name"], value=dumps(data["value"])) + return make_response("Message received successfully!", 200) except (KeyError, ValueError): - pass + return make_response(format_exc(), 500) + + +@login_required +@blueprint.route("/apiary/mapper_errors", methods=["GET"]) +def mapper_errors(): + errors = defaultdict(list) + for key in redis.scan_iter(match="AOTMapper_*"): + errors[key].append(redis.get(key)) + return make_response(dumps(errors), 200)
32522114db3c9afc5331a898df3b956b6a3d229a
imagekit/conf.py
imagekit/conf.py
from appconf import AppConf
from django.conf import settings


class ImageKitConf(AppConf):
    CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash'
    SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path'
    CACHEFILE_DIR = 'CACHE/images'
    DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple'
    DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime'

    DEFAULT_FILE_STORAGE = None

    CACHE_BACKEND = None
    CACHE_PREFIX = 'imagekit:'
    USE_MEMCACHED_SAFE_CACHE_KEY = True

    def configure_cache_backend(self, value):
        if value is None:
            if getattr(settings, 'CACHES', None):
                value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default'
            else:
                value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND
        return value

    def configure_default_file_storage(self, value):
        if value is None:
            value = settings.DEFAULT_FILE_STORAGE
        return value
from appconf import AppConf
from django.conf import settings


class ImageKitConf(AppConf):
    CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash'
    SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path'
    CACHEFILE_DIR = 'CACHE/images'
    DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple'
    DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime'

    DEFAULT_FILE_STORAGE = None

    CACHE_BACKEND = None
    CACHE_PREFIX = 'imagekit:'
    USE_MEMCACHED_SAFE_CACHE_KEY = True

    def configure_cache_backend(self, value):
        if value is None:
            try:
                from django.core.cache.backends.dummy import DummyCache
            except ImportError:
                dummy_cache = 'dummy://'
            else:
                dummy_cache = 'django.core.cache.backends.dummy.DummyCache'

            if settings.DEBUG:
                value = dummy_cache
            else:
                value = (
                    getattr(settings, 'CACHES', {}).get('default')
                    or getattr(settings, 'CACHE_BACKEND', None)
                    or dummy_cache
                )
        return value

    def configure_default_file_storage(self, value):
        if value is None:
            value = settings.DEFAULT_FILE_STORAGE
        return value
Improve default cache backend handling
Improve default cache backend handling
Python
bsd-3-clause
FundedByMe/django-imagekit,tawanda/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit
--- +++ @@ -17,10 +17,21 @@ def configure_cache_backend(self, value): if value is None: - if getattr(settings, 'CACHES', None): - value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' + try: + from django.core.cache.backends.dummy import DummyCache + except ImportError: + dummy_cache = 'dummy://' else: - value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND + dummy_cache = 'django.core.cache.backends.dummy.DummyCache' + + if settings.DEBUG: + value = dummy_cache + else: + value = ( + getattr(settings, 'CACHES', {}).get('default') + or getattr(settings, 'CACHE_BACKEND', None) + or dummy_cache + ) return value def configure_default_file_storage(self, value):
ef17e9368cd7776f5777f095ab7dbc0c0f38a326
common/lib/capa/setup.py
common/lib/capa/setup.py
from setuptools import setup, find_packages

setup(
    name="capa",
    version="0.1",
    packages=find_packages(exclude=["tests"]),
    install_requires=['distribute==0.6.30', 'pyparsing==1.5.6'],
)
from setuptools import setup, find_packages

setup(
    name="capa",
    version="0.1",
    packages=find_packages(exclude=["tests"]),
    install_requires=['distribute==0.6.28', 'pyparsing==1.5.6'],
)
Make capa specify distribute 0.6.28 like the rest of the project
Make capa specify distribute 0.6.28 like the rest of the project
Python
agpl-3.0
shubhdev/edx-platform,mcgachey/edx-platform,philanthropy-u/edx-platform,zofuthan/edx-platform,polimediaupv/edx-platform,SravanthiSinha/edx-platform,rue89-tech/edx-platform,louyihua/edx-platform,adoosii/edx-platform,4eek/edx-platform,BehavioralInsightsTeam/edx-platform,LICEF/edx-platform,mbareta/edx-platform-ft,shubhdev/edx-platform,zadgroup/edx-platform,UXE/local-edx,LICEF/edx-platform,jzoldak/edx-platform,jazztpt/edx-platform,mtlchun/edx,Livit/Livit.Learn.EdX,cselis86/edx-platform,OmarIthawi/edx-platform,CourseTalk/edx-platform,Edraak/edraak-platform,marcore/edx-platform,msegado/edx-platform,auferack08/edx-platform,vasyarv/edx-platform,louyihua/edx-platform,xuxiao19910803/edx,beni55/edx-platform,jzoldak/edx-platform,teltek/edx-platform,edx/edx-platform,ovnicraft/edx-platform,sameetb-cuelogic/edx-platform-test,zhenzhai/edx-platform,beacloudgenius/edx-platform,rue89-tech/edx-platform,cselis86/edx-platform,waheedahmed/edx-platform,rismalrv/edx-platform,ampax/edx-platform,J861449197/edx-platform,syjeon/new_edx,SravanthiSinha/edx-platform,teltek/edx-platform,edx/edx-platform,wwj718/edx-platform,EduPepperPDTesting/pepper2013-testing,dsajkl/123,ZLLab-Mooc/edx-platform,bigdatauniversity/edx-platform,nanolearningllc/edx-platform-cypress-2,devs1991/test_edx_docmode,rationalAgent/edx-platform-custom,arifsetiawan/edx-platform,apigee/edx-platform,Livit/Livit.Learn.EdX,mitocw/edx-platform,Kalyzee/edx-platform,JCBarahona/edX,IITBinterns13/edx-platform-dev,Semi-global/edx-platform,shashank971/edx-platform,mjg2203/edx-platform-seas,SravanthiSinha/edx-platform,Ayub-Khan/edx-platform,raccoongang/edx-platform,jswope00/GAI,devs1991/test_edx_docmode,pepeportela/edx-platform,longmen21/edx-platform,romain-li/edx-platform,appliedx/edx-platform,romain-li/edx-platform,shubhdev/edxOnBaadal,devs1991/test_edx_docmode,ahmadiga/min_edx,vikas1885/test1,y12uc231/edx-platform,kmoocdev/edx-platform,benpatterson/edx-platform,ZLLab-Mooc/edx-platform,Lektorium-LLC/edx-platform,fintech-circle/edx-platform,motion2015/edx-platform,ferabra/edx-platform,motion2015/edx-platform,tanmaykm/edx-platform,wwj718/ANALYSE,rationalAgent/edx-platform-custom,ESOedX/edx-platform,Shrhawk/edx-platform,openfun/edx-platform,zadgroup/edx-platform,alu042/edx-platform,pomegranited/edx-platform,doganov/edx-platform,Stanford-Online/edx-platform,xinjiguaike/edx-platform,BehavioralInsightsTeam/edx-platform,beni55/edx-platform,mtlchun/edx,edx-solutions/edx-platform,utecuy/edx-platform,mjg2203/edx-platform-seas,appliedx/edx-platform,jswope00/GAI,stvstnfrd/edx-platform,raccoongang/edx-platform,shubhdev/openedx,eemirtekin/edx-platform,naresh21/synergetics-edx-platform,shubhdev/openedx,philanthropy-u/edx-platform,jonathan-beard/edx-platform,apigee/edx-platform,RPI-OPENEDX/edx-platform,EDUlib/edx-platform,motion2015/a3,Kalyzee/edx-platform,y12uc231/edx-platform,shubhdev/edxOnBaadal,Unow/edx-platform,shurihell/testasia,DefyVentures/edx-platform,kmoocdev2/edx-platform,xinjiguaike/edx-platform,pku9104038/edx-platform,praveen-pal/edx-platform,cecep-edu/edx-platform,Shrhawk/edx-platform,y12uc231/edx-platform,valtech-mooc/edx-platform,EduPepperPD/pepper2013,eduNEXT/edunext-platform,a-parhom/edx-platform,zerobatu/edx-platform,hamzehd/edx-platform,Softmotions/edx-platform,atsolakid/edx-platform,louyihua/edx-platform,IndonesiaX/edx-platform,dkarakats/edx-platform,jazztpt/edx-platform,chauhanhardik/populo_2,atsolakid/edx-platform,mahendra-r/edx-platform,Edraak/circleci-edx-platform,ampax/edx-platform,synergeticsedx/deployment-wipro,TeachAtTUM/edx-platform,edx/edx-platform
,inares/edx-platform,hastexo/edx-platform,itsjeyd/edx-platform,cecep-edu/edx-platform,dsajkl/reqiop,martynovp/edx-platform,tiagochiavericosta/edx-platform,jamiefolsom/edx-platform,mjg2203/edx-platform-seas,pomegranited/edx-platform,rismalrv/edx-platform,nanolearning/edx-platform,WatanabeYasumasa/edx-platform,atsolakid/edx-platform,don-github/edx-platform,mjirayu/sit_academy,devs1991/test_edx_docmode,ak2703/edx-platform,kmoocdev/edx-platform,abdoosh00/edx-rtl-final,polimediaupv/edx-platform,andyzsf/edx,beni55/edx-platform,unicri/edx-platform,Semi-global/edx-platform,shubhdev/edx-platform,dkarakats/edx-platform,motion2015/a3,EDUlib/edx-platform,edry/edx-platform,angelapper/edx-platform,nagyistoce/edx-platform,jazkarta/edx-platform,jswope00/griffinx,procangroup/edx-platform,chauhanhardik/populo,hamzehd/edx-platform,JCBarahona/edX,peterm-itr/edx-platform,edry/edx-platform,ahmedaljazzar/edx-platform,deepsrijit1105/edx-platform,tiagochiavericosta/edx-platform,defance/edx-platform,doismellburning/edx-platform,kamalx/edx-platform,analyseuc3m/ANALYSE-v1,AkA84/edx-platform,antoviaque/edx-platform,ahmadio/edx-platform,proversity-org/edx-platform,IndonesiaX/edx-platform,benpatterson/edx-platform,EduPepperPD/pepper2013,caesar2164/edx-platform,valtech-mooc/edx-platform,yokose-ks/edx-platform,IITBinterns13/edx-platform-dev,UOMx/edx-platform,leansoft/edx-platform,etzhou/edx-platform,leansoft/edx-platform,TsinghuaX/edx-platform,longmen21/edx-platform,miptliot/edx-platform,jamiefolsom/edx-platform,TeachAtTUM/edx-platform,shubhdev/edx-platform,rue89-tech/edx-platform,jruiperezv/ANALYSE,martynovp/edx-platform,morenopc/edx-platform,ubc/edx-platform,eduNEXT/edunext-platform,ovnicraft/edx-platform,shurihell/testasia,Edraak/circleci-edx-platform,solashirai/edx-platform,dcosentino/edx-platform,hkawasaki/kawasaki-aio8-0,edx-solutions/edx-platform,itsjeyd/edx-platform,arifsetiawan/edx-platform,Kalyzee/edx-platform,IONISx/edx-platform,jazkarta/edx-platform,olexiim/edx-platform,bdero/edx-platform,DefyVentures/edx-platform,fly19890211/edx-platform,edry/edx-platform,vikas1885/test1,DNFcode/edx-platform,fly19890211/edx-platform,xuxiao19910803/edx,IONISx/edx-platform,ahmedaljazzar/edx-platform,amir-qayyum-khan/edx-platform,syjeon/new_edx,jazkarta/edx-platform-for-isc,proversity-org/edx-platform,dcosentino/edx-platform,edx-solutions/edx-platform,beacloudgenius/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,kxliugang/edx-platform,nanolearningllc/edx-platform-cypress,waheedahmed/edx-platform,jazztpt/edx-platform,kxliugang/edx-platform,abdoosh00/edx-rtl-final,jruiperezv/ANALYSE,devs1991/test_edx_docmode,kalebhartje/schoolboost,mbareta/edx-platform-ft,xuxiao19910803/edx-platform,beacloudgenius/edx-platform,4eek/edx-platform,itsjeyd/edx-platform,hkawasaki/kawasaki-aio8-0,simbs/edx-platform,benpatterson/edx-platform,miptliot/edx-platform,antonve/s4-project-mooc,mbareta/edx-platform-ft,kursitet/edx-platform,hkawasaki/kawasaki-aio8-1,morenopc/edx-platform,chauhanhardik/populo_2,caesar2164/edx-platform,unicri/edx-platform,vismartltd/edx-platform,atsolakid/edx-platform,mushtaqak/edx-platform,synergeticsedx/deployment-wipro,rismalrv/edx-platform,Softmotions/edx-platform,chrisndodge/edx-platform,ampax/edx-platform-backup,pabloborrego93/edx-platform,hkawasaki/kawasaki-aio8-1,Unow/edx-platform,B-MOOC/edx-platform,vasyarv/edx-platform,CourseTalk/edx-platform,ahmadio/edx-platform,antonve/s4-project-mooc,playm2mboy/edx-platform,solashirai/edx-platform,procangroup/edx-platform,iivic/BoiseStateX,valtech-mooc/edx-platform,tanmaykm/
edx-platform,torchingloom/edx-platform,jolyonb/edx-platform,cyanna/edx-platform,franosincic/edx-platform,EduPepperPD/pepper2013,hastexo/edx-platform,jazkarta/edx-platform,kxliugang/edx-platform,xuxiao19910803/edx,beni55/edx-platform,Semi-global/edx-platform,JCBarahona/edX,fintech-circle/edx-platform,kamalx/edx-platform,xuxiao19910803/edx-platform,mitocw/edx-platform,mjirayu/sit_academy,jbassen/edx-platform,jbzdak/edx-platform,kursitet/edx-platform,jzoldak/edx-platform,gymnasium/edx-platform,chrisndodge/edx-platform,zofuthan/edx-platform,doismellburning/edx-platform,franosincic/edx-platform,jbassen/edx-platform,IITBinterns13/edx-platform-dev,10clouds/edx-platform,pepeportela/edx-platform,rhndg/openedx,halvertoluke/edx-platform,pku9104038/edx-platform,cyanna/edx-platform,hkawasaki/kawasaki-aio8-0,DNFcode/edx-platform,Softmotions/edx-platform,pomegranited/edx-platform,halvertoluke/edx-platform,RPI-OPENEDX/edx-platform,ak2703/edx-platform,dsajkl/reqiop,sudheerchintala/LearnEraPlatForm,shabab12/edx-platform,bigdatauniversity/edx-platform,pdehaye/theming-edx-platform,BehavioralInsightsTeam/edx-platform,UXE/local-edx,morpheby/levelup-by,morenopc/edx-platform,gsehub/edx-platform,IndonesiaX/edx-platform,Edraak/edx-platform,morpheby/levelup-by,jonathan-beard/edx-platform,jazkarta/edx-platform,jamiefolsom/edx-platform,chauhanhardik/populo_2,syjeon/new_edx,jamesblunt/edx-platform,cpennington/edx-platform,leansoft/edx-platform,dsajkl/123,antonve/s4-project-mooc,solashirai/edx-platform,SravanthiSinha/edx-platform,CourseTalk/edx-platform,JioEducation/edx-platform,ahmedaljazzar/edx-platform,ubc/edx-platform,zhenzhai/edx-platform,jbzdak/edx-platform,valtech-mooc/edx-platform,jolyonb/edx-platform,chrisndodge/edx-platform,eduNEXT/edx-platform,mahendra-r/edx-platform,shubhdev/edxOnBaadal,xingyepei/edx-platform,Ayub-Khan/edx-platform,pdehaye/theming-edx-platform,simbs/edx-platform,appliedx/edx-platform,ak2703/edx-platform,jonathan-beard/edx-platform,shashank971/edx-platform,devs1991/test_edx_docmode,pku9104038/edx-platform,msegado/edx-platform,deepsrijit1105/edx-platform,rhndg/openedx,ubc/edx-platform,LearnEra/LearnEraPlaftform,benpatterson/edx-platform,zubair-arbi/edx-platform,shubhdev/openedx,utecuy/edx-platform,chudaol/edx-platform,hkawasaki/kawasaki-aio8-1,B-MOOC/edx-platform,kmoocdev2/edx-platform,DNFcode/edx-platform,inares/edx-platform,hkawasaki/kawasaki-aio8-1,hamzehd/edx-platform,eestay/edx-platform,bigdatauniversity/edx-platform,eestay/edx-platform,jamiefolsom/edx-platform,Unow/edx-platform,arifsetiawan/edx-platform,nagyistoce/edx-platform,CredoReference/edx-platform,PepperPD/edx-pepper-platform,mahendra-r/edx-platform,pomegranited/edx-platform,olexiim/edx-platform,IITBinterns13/edx-platform-dev,zerobatu/edx-platform,inares/edx-platform,ZLLab-Mooc/edx-platform,peterm-itr/edx-platform,devs1991/test_edx_docmode,caesar2164/edx-platform,knehez/edx-platform,pabloborrego93/edx-platform,morenopc/edx-platform,msegado/edx-platform,unicri/edx-platform,etzhou/edx-platform,yokose-ks/edx-platform,martynovp/edx-platform,zofuthan/edx-platform,TsinghuaX/edx-platform,jjmiranda/edx-platform,kmoocdev/edx-platform,carsongee/edx-platform,ahmadio/edx-platform,shubhdev/openedx,unicri/edx-platform,jazkarta/edx-platform,amir-qayyum-khan/edx-platform,prarthitm/edxplatform,nttks/jenkins-test,doismellburning/edx-platform,ampax/edx-platform,alexthered/kienhoc-platform,andyzsf/edx,Endika/edx-platform,praveen-pal/edx-platform,waheedahmed/edx-platform,mjirayu/sit_academy,msegado/edx-platform,EduPepperPDTesting/pepper2013-testing,ale
xthered/kienhoc-platform,rhndg/openedx,jbzdak/edx-platform,praveen-pal/edx-platform,chudaol/edx-platform,vasyarv/edx-platform,iivic/BoiseStateX,zubair-arbi/edx-platform,hmcmooc/muddx-platform,vismartltd/edx-platform,mcgachey/edx-platform,4eek/edx-platform,torchingloom/edx-platform,TsinghuaX/edx-platform,Endika/edx-platform,olexiim/edx-platform,vismartltd/edx-platform,lduarte1991/edx-platform,Edraak/edraak-platform,nanolearning/edx-platform,prarthitm/edxplatform,UXE/local-edx,mahendra-r/edx-platform,chudaol/edx-platform,naresh21/synergetics-edx-platform,EduPepperPD/pepper2013,longmen21/edx-platform,Edraak/circleci-edx-platform,LearnEra/LearnEraPlaftform,adoosii/edx-platform,ampax/edx-platform-backup,pomegranited/edx-platform,utecuy/edx-platform,leansoft/edx-platform,a-parhom/edx-platform,polimediaupv/edx-platform,AkA84/edx-platform,edry/edx-platform,stvstnfrd/edx-platform,antoviaque/edx-platform,alexthered/kienhoc-platform,defance/edx-platform,shashank971/edx-platform,ferabra/edx-platform,arifsetiawan/edx-platform,marcore/edx-platform,atsolakid/edx-platform,shubhdev/openedx,ahmadiga/min_edx,Edraak/edx-platform,EDUlib/edx-platform,carsongee/edx-platform,SivilTaram/edx-platform,praveen-pal/edx-platform,andyzsf/edx,jamesblunt/edx-platform,zadgroup/edx-platform,xuxiao19910803/edx-platform,nagyistoce/edx-platform,cecep-edu/edx-platform,nikolas/edx-platform,wwj718/edx-platform,nttks/edx-platform,doismellburning/edx-platform,jazkarta/edx-platform-for-isc,angelapper/edx-platform,shabab12/edx-platform,xuxiao19910803/edx,zerobatu/edx-platform,simbs/edx-platform,AkA84/edx-platform,alexthered/kienhoc-platform,MakeHer/edx-platform,marcore/edx-platform,apigee/edx-platform,carsongee/edx-platform,stvstnfrd/edx-platform,chauhanhardik/populo_2,dsajkl/reqiop,chudaol/edx-platform,MSOpenTech/edx-platform,mjirayu/sit_academy,J861449197/edx-platform,jelugbo/tundex,WatanabeYasumasa/edx-platform,TsinghuaX/edx-platform,a-parhom/edx-platform,sudheerchintala/LearnEraPlatForm,prarthitm/edxplatform,kmoocdev2/edx-platform,y12uc231/edx-platform,hamzehd/edx-platform,SivilTaram/edx-platform,antonve/s4-project-mooc,longmen21/edx-platform,kalebhartje/schoolboost,mjirayu/sit_academy,defance/edx-platform,synergeticsedx/deployment-wipro,synergeticsedx/deployment-wipro,eemirtekin/edx-platform,prarthitm/edxplatform,Semi-global/edx-platform,ZLLab-Mooc/edx-platform,rationalAgent/edx-platform-custom,andyzsf/edx,cognitiveclass/edx-platform,rhndg/openedx,UOMx/edx-platform,gymnasium/edx-platform,leansoft/edx-platform,DefyVentures/edx-platform,DefyVentures/edx-platform,Edraak/edraak-platform,appliedx/edx-platform,arbrandes/edx-platform,longmen21/edx-platform,B-MOOC/edx-platform,motion2015/a3,jruiperezv/ANALYSE,nanolearningllc/edx-platform-cypress-2,ovnicraft/edx-platform,bitifirefly/edx-platform,wwj718/ANALYSE,teltek/edx-platform,xingyepei/edx-platform,JCBarahona/edX,playm2mboy/edx-platform,yokose-ks/edx-platform,motion2015/edx-platform,playm2mboy/edx-platform,pepeportela/edx-platform,fintech-circle/edx-platform,pdehaye/theming-edx-platform,Ayub-Khan/edx-platform,jamesblunt/edx-platform,procangroup/edx-platform,louyihua/edx-platform,appliedx/edx-platform,kalebhartje/schoolboost,jswope00/GAI,nanolearningllc/edx-platform-cypress-2,shashank971/edx-platform,Semi-global/edx-platform,chand3040/cloud_that,IONISx/edx-platform,miptliot/edx-platform,cpennington/edx-platform,fintech-circle/edx-platform,xuxiao19910803/edx,bitifirefly/edx-platform,simbs/edx-platform,auferack08/edx-platform,hastexo/edx-platform,RPI-OPENEDX/edx-platform,eemirtekin/edx-p
latform,hkawasaki/kawasaki-aio8-2,eduNEXT/edx-platform,defance/edx-platform,MSOpenTech/edx-platform,adoosii/edx-platform,openfun/edx-platform,pepeportela/edx-platform,adoosii/edx-platform,ESOedX/edx-platform,ahmadio/edx-platform,pabloborrego93/edx-platform,mahendra-r/edx-platform,cselis86/edx-platform,waheedahmed/edx-platform,xingyepei/edx-platform,mcgachey/edx-platform,auferack08/edx-platform,franosincic/edx-platform,romain-li/edx-platform,analyseuc3m/ANALYSE-v1,chauhanhardik/populo,MakeHer/edx-platform,gsehub/edx-platform,antoviaque/edx-platform,PepperPD/edx-pepper-platform,pelikanchik/edx-platform,Shrhawk/edx-platform,jolyonb/edx-platform,antonve/s4-project-mooc,kamalx/edx-platform,yokose-ks/edx-platform,naresh21/synergetics-edx-platform,yokose-ks/edx-platform,nttks/jenkins-test,beacloudgenius/edx-platform,ahmadiga/min_edx,UOMx/edx-platform,Stanford-Online/edx-platform,nanolearning/edx-platform,romain-li/edx-platform,eduNEXT/edunext-platform,sudheerchintala/LearnEraPlatForm,Edraak/edx-platform,abdoosh00/edraak,nikolas/edx-platform,B-MOOC/edx-platform,jazkarta/edx-platform-for-isc,vikas1885/test1,edx/edx-platform,etzhou/edx-platform,doganov/edx-platform,wwj718/ANALYSE,wwj718/edx-platform,mushtaqak/edx-platform,DNFcode/edx-platform,playm2mboy/edx-platform,dsajkl/123,J861449197/edx-platform,torchingloom/edx-platform,IONISx/edx-platform,philanthropy-u/edx-platform,knehez/edx-platform,polimediaupv/edx-platform,peterm-itr/edx-platform,nikolas/edx-platform,tiagochiavericosta/edx-platform,10clouds/edx-platform,Kalyzee/edx-platform,auferack08/edx-platform,wwj718/edx-platform,don-github/edx-platform,lduarte1991/edx-platform,kalebhartje/schoolboost,Lektorium-LLC/edx-platform,nanolearning/edx-platform,dkarakats/edx-platform,ak2703/edx-platform,nikolas/edx-platform,arifsetiawan/edx-platform,jonathan-beard/edx-platform,morpheby/levelup-by,chauhanhardik/populo_2,Edraak/circleci-edx-platform,PepperPD/edx-pepper-platform,zofuthan/edx-platform,dsajkl/123,OmarIthawi/edx-platform,miptliot/edx-platform,chrisndodge/edx-platform,ak2703/edx-platform,Livit/Livit.Learn.EdX,shabab12/edx-platform,Edraak/circleci-edx-platform,Endika/edx-platform,proversity-org/edx-platform,jjmiranda/edx-platform,chauhanhardik/populo,motion2015/a3,sudheerchintala/LearnEraPlatForm,sameetb-cuelogic/edx-platform-test,rationalAgent/edx-platform-custom,eemirtekin/edx-platform,nanolearningllc/edx-platform-cypress,hmcmooc/muddx-platform,pdehaye/theming-edx-platform,valtech-mooc/edx-platform,deepsrijit1105/edx-platform,tiagochiavericosta/edx-platform,eduNEXT/edx-platform,syjeon/new_edx,EDUlib/edx-platform,xuxiao19910803/edx-platform,ovnicraft/edx-platform,nttks/edx-platform,pelikanchik/edx-platform,EduPepperPDTesting/pepper2013-testing,nttks/edx-platform,pelikanchik/edx-platform,LearnEra/LearnEraPlaftform,antoviaque/edx-platform,vikas1885/test1,jswope00/GAI,mtlchun/edx,hkawasaki/kawasaki-aio8-0,hkawasaki/kawasaki-aio8-2,JioEducation/edx-platform,shurihell/testasia,jazkarta/edx-platform-for-isc,Ayub-Khan/edx-platform,Livit/Livit.Learn.EdX,carsongee/edx-platform,hamzehd/edx-platform,kursitet/edx-platform,nanolearningllc/edx-platform-cypress,lduarte1991/edx-platform,Stanford-Online/edx-platform,jazztpt/edx-platform,xingyepei/edx-platform,MSOpenTech/edx-platform,shashank971/edx-platform,nagyistoce/edx-platform,zerobatu/edx-platform,deepsrijit1105/edx-platform,tanmaykm/edx-platform,Edraak/edraak-platform,dsajkl/reqiop,jazztpt/edx-platform,cselis86/edx-platform,shurihell/testasia,playm2mboy/edx-platform,simbs/edx-platform,Kalyzee/edx-platform,AkA8
4/edx-platform,rue89-tech/edx-platform,teltek/edx-platform,jruiperezv/ANALYSE,vasyarv/edx-platform,alu042/edx-platform,zofuthan/edx-platform,jelugbo/tundex,ahmadiga/min_edx,eestay/edx-platform,chand3040/cloud_that,nttks/jenkins-test,LearnEra/LearnEraPlaftform,jswope00/griffinx,cognitiveclass/edx-platform,BehavioralInsightsTeam/edx-platform,ahmadio/edx-platform,jbassen/edx-platform,jjmiranda/edx-platform,Shrhawk/edx-platform,MakeHer/edx-platform,SivilTaram/edx-platform,polimediaupv/edx-platform,ampax/edx-platform-backup,Edraak/edx-platform,utecuy/edx-platform,zadgroup/edx-platform,philanthropy-u/edx-platform,LICEF/edx-platform,PepperPD/edx-pepper-platform,mushtaqak/edx-platform,nagyistoce/edx-platform,caesar2164/edx-platform,xinjiguaike/edx-platform,jbzdak/edx-platform,apigee/edx-platform,CredoReference/edx-platform,jelugbo/tundex,alexthered/kienhoc-platform,shubhdev/edxOnBaadal,raccoongang/edx-platform,mbareta/edx-platform-ft,PepperPD/edx-pepper-platform,knehez/edx-platform,fly19890211/edx-platform,gsehub/edx-platform,msegado/edx-platform,bdero/edx-platform,vikas1885/test1,beni55/edx-platform,dkarakats/edx-platform,TeachAtTUM/edx-platform,kxliugang/edx-platform,jelugbo/tundex,JioEducation/edx-platform,utecuy/edx-platform,jbassen/edx-platform,shurihell/testasia,nttks/jenkins-test,motion2015/a3,morpheby/levelup-by,eduNEXT/edunext-platform,eduNEXT/edx-platform,abdoosh00/edraak,franosincic/edx-platform,kursitet/edx-platform,amir-qayyum-khan/edx-platform,mtlchun/edx,nanolearning/edx-platform,eemirtekin/edx-platform,hkawasaki/kawasaki-aio8-2,dcosentino/edx-platform,peterm-itr/edx-platform,jamesblunt/edx-platform,pabloborrego93/edx-platform,halvertoluke/edx-platform,pku9104038/edx-platform,chauhanhardik/populo,sameetb-cuelogic/edx-platform-test,EduPepperPDTesting/pepper2013-testing,bigdatauniversity/edx-platform,appsembler/edx-platform,CredoReference/edx-platform,ahmadiga/min_edx,kmoocdev2/edx-platform,SivilTaram/edx-platform,OmarIthawi/edx-platform,kmoocdev/edx-platform,jazkarta/edx-platform-for-isc,zubair-arbi/edx-platform,motion2015/edx-platform,etzhou/edx-platform,zubair-arbi/edx-platform,J861449197/edx-platform,mtlchun/edx,J861449197/edx-platform,jamiefolsom/edx-platform,B-MOOC/edx-platform,solashirai/edx-platform,rationalAgent/edx-platform-custom,wwj718/ANALYSE,MakeHer/edx-platform,zhenzhai/edx-platform,shubhdev/edx-platform,MSOpenTech/edx-platform,openfun/edx-platform,angelapper/edx-platform,don-github/edx-platform,10clouds/edx-platform,waheedahmed/edx-platform,Softmotions/edx-platform,jzoldak/edx-platform,hmcmooc/muddx-platform,bitifirefly/edx-platform,ahmedaljazzar/edx-platform,jonathan-beard/edx-platform,torchingloom/edx-platform,MSOpenTech/edx-platform,ovnicraft/edx-platform,JCBarahona/edX,amir-qayyum-khan/edx-platform,don-github/edx-platform,solashirai/edx-platform,Unow/edx-platform,mushtaqak/edx-platform,Stanford-Online/edx-platform,edx-solutions/edx-platform,ferabra/edx-platform,UXE/local-edx,IndonesiaX/edx-platform,OmarIthawi/edx-platform,ampax/edx-platform-backup,IndonesiaX/edx-platform,mcgachey/edx-platform,MakeHer/edx-platform,stvstnfrd/edx-platform,jswope00/griffinx,mcgachey/edx-platform,vasyarv/edx-platform,abdoosh00/edx-rtl-final,zerobatu/edx-platform,kalebhartje/schoolboost,CredoReference/edx-platform,iivic/BoiseStateX,DNFcode/edx-platform,fly19890211/edx-platform,kxliugang/edx-platform,etzhou/edx-platform,cpennington/edx-platform,EduPepperPDTesting/pepper2013-testing,shabab12/edx-platform,chand3040/cloud_that,wwj718/edx-platform,xingyepei/edx-platform,martynovp/edx-platform,
10clouds/edx-platform,motion2015/edx-platform,benpatterson/edx-platform,mitocw/edx-platform,cyanna/edx-platform,Endika/edx-platform,WatanabeYasumasa/edx-platform,bigdatauniversity/edx-platform,TeachAtTUM/edx-platform,LICEF/edx-platform,cognitiveclass/edx-platform,4eek/edx-platform,mushtaqak/edx-platform,appsembler/edx-platform,nikolas/edx-platform,nttks/edx-platform,procangroup/edx-platform,sameetb-cuelogic/edx-platform-test,morenopc/edx-platform,sameetb-cuelogic/edx-platform-test,gsehub/edx-platform,nanolearningllc/edx-platform-cypress,alu042/edx-platform,hkawasaki/kawasaki-aio8-2,cecep-edu/edx-platform,doganov/edx-platform,cecep-edu/edx-platform,abdoosh00/edraak,proversity-org/edx-platform,pelikanchik/edx-platform,ferabra/edx-platform,inares/edx-platform,SravanthiSinha/edx-platform,xuxiao19910803/edx-platform,iivic/BoiseStateX,gymnasium/edx-platform,4eek/edx-platform,edry/edx-platform,kamalx/edx-platform,knehez/edx-platform,chand3040/cloud_that,nanolearningllc/edx-platform-cypress,cselis86/edx-platform,RPI-OPENEDX/edx-platform,don-github/edx-platform,devs1991/test_edx_docmode,bdero/edx-platform,zhenzhai/edx-platform,ferabra/edx-platform,kmoocdev/edx-platform,zhenzhai/edx-platform,xinjiguaike/edx-platform,IONISx/edx-platform,mjg2203/edx-platform-seas,martynovp/edx-platform,analyseuc3m/ANALYSE-v1,Lektorium-LLC/edx-platform,olexiim/edx-platform,dkarakats/edx-platform,naresh21/synergetics-edx-platform,appsembler/edx-platform,bitifirefly/edx-platform,kamalx/edx-platform,RPI-OPENEDX/edx-platform,torchingloom/edx-platform,nttks/edx-platform,abdoosh00/edraak,vismartltd/edx-platform,y12uc231/edx-platform,wwj718/ANALYSE,angelapper/edx-platform,appsembler/edx-platform,ESOedX/edx-platform,mitocw/edx-platform,franosincic/edx-platform,EduPepperPD/pepper2013,hastexo/edx-platform,rismalrv/edx-platform,nttks/jenkins-test,inares/edx-platform,EduPepperPDTesting/pepper2013-testing,halvertoluke/edx-platform,jamesblunt/edx-platform,arbrandes/edx-platform,cognitiveclass/edx-platform,analyseuc3m/ANALYSE-v1,vismartltd/edx-platform,ESOedX/edx-platform,openfun/edx-platform,jruiperezv/ANALYSE,xinjiguaike/edx-platform,zubair-arbi/edx-platform,AkA84/edx-platform,openfun/edx-platform,tiagochiavericosta/edx-platform,doganov/edx-platform,chudaol/edx-platform,Edraak/edx-platform,bdero/edx-platform,a-parhom/edx-platform,shubhdev/edxOnBaadal,adoosii/edx-platform,jjmiranda/edx-platform,rue89-tech/edx-platform,knehez/edx-platform,nanolearningllc/edx-platform-cypress-2,kmoocdev2/edx-platform,jswope00/griffinx,doganov/edx-platform,chauhanhardik/populo,chand3040/cloud_that,jbzdak/edx-platform,eestay/edx-platform,ZLLab-Mooc/edx-platform,hmcmooc/muddx-platform,dsajkl/123,zadgroup/edx-platform,Ayub-Khan/edx-platform,unicri/edx-platform,cpennington/edx-platform,gymnasium/edx-platform,abdoosh00/edx-rtl-final,iivic/BoiseStateX,marcore/edx-platform,ubc/edx-platform,UOMx/edx-platform,raccoongang/edx-platform,alu042/edx-platform,eestay/edx-platform,lduarte1991/edx-platform,jbassen/edx-platform,halvertoluke/edx-platform,Softmotions/edx-platform,rhndg/openedx,bitifirefly/edx-platform,cognitiveclass/edx-platform,beacloudgenius/edx-platform,dcosentino/edx-platform,olexiim/edx-platform,fly19890211/edx-platform,doismellburning/edx-platform,cyanna/edx-platform,JioEducation/edx-platform,WatanabeYasumasa/edx-platform,ubc/edx-platform,SivilTaram/edx-platform,ampax/edx-platform,jelugbo/tundex,itsjeyd/edx-platform,nanolearningllc/edx-platform-cypress-2,ampax/edx-platform-backup,Lektorium-LLC/edx-platform,DefyVentures/edx-platform,jswope00/griffinx,L
ICEF/edx-platform,romain-li/edx-platform,Shrhawk/edx-platform,kursitet/edx-platform,CourseTalk/edx-platform,tanmaykm/edx-platform,cyanna/edx-platform,rismalrv/edx-platform,jolyonb/edx-platform,dcosentino/edx-platform
--- +++ @@ -4,5 +4,5 @@ name="capa", version="0.1", packages=find_packages(exclude=["tests"]), - install_requires=['distribute==0.6.30', 'pyparsing==1.5.6'], + install_requires=['distribute==0.6.28', 'pyparsing==1.5.6'], )
6b6181f1c2f902f20da440eb3bedb5d02ecfbf16
angr/engines/soot/expressions/cast.py
angr/engines/soot/expressions/cast.py
from .base import SimSootExpr

from archinfo import ArchSoot

import logging

l = logging.getLogger("angr.engines.soot.expressions.cast")


class SimSootExpr_Cast(SimSootExpr):
    def __init__(self, expr, state):
        super(SimSootExpr_Cast, self).__init__(expr, state)

    def _execute(self):
        if self.expr.cast_type in ['double', 'float']:
            l.error('Casting of double and float types not supported.')
            return

        # get value
        local = self._translate_value(self.expr.value)
        value_uncasted = self.state.memory.load(local)

        # lookup the type size and extract value
        value_size = ArchSoot.primitive_types[self.expr.cast_type]
        value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed

        # determine size of Soot bitvector and resize bitvector
        # Note: smaller types than int's are stored in a 32-bit BV
        value_soot_size = value_size if value_size >= 32 else 32
        if self.expr.cast_type in ['char', 'boolean']:
            # unsigned extend
            value_casted = value_extracted.zero_extend(value_soot_size-value_size)
        else:
            # signed extend
            value_casted = value_extracted.sign_extend(value_soot_size-value_size)
        self.expr = value_casted
from .base import SimSootExpr

from archinfo import ArchSoot

import logging

l = logging.getLogger("angr.engines.soot.expressions.cast")


class SimSootExpr_Cast(SimSootExpr):
    def __init__(self, expr, state):
        super(SimSootExpr_Cast, self).__init__(expr, state)

    def _execute(self):
        if self.expr.cast_type in ['double', 'float']:
            l.error('Casting of double and float types not supported.')
            return

        # get value
        local = self._translate_value(self.expr.value)
        value_uncasted = self.state.memory.load(local)

        # lookup the type size and extract value
        value_size = ArchSoot.sizeof[self.expr.cast_type]
        value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed

        # determine size of Soot bitvector and resize bitvector
        # Note: smaller types than int's are stored in a 32-bit BV
        value_soot_size = value_size if value_size >= 32 else 32
        if self.expr.cast_type in ['char', 'boolean']:
            # unsigned extend
            value_casted = value_extracted.zero_extend(value_soot_size-value_size)
        else:
            # signed extend
            value_casted = value_extracted.sign_extend(value_soot_size-value_size)
        self.expr = value_casted
Use correct dict for the type sizes
Use correct dict for the type sizes
Python
bsd-2-clause
iamahuman/angr,iamahuman/angr,iamahuman/angr,angr/angr,schieb/angr,schieb/angr,angr/angr,schieb/angr,angr/angr
--- +++ @@ -21,7 +21,7 @@ value_uncasted = self.state.memory.load(local) # lookup the type size and extract value - value_size = ArchSoot.primitive_types[self.expr.cast_type] + value_size = ArchSoot.sizeof[self.expr.cast_type] value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector
bb0fae91cc0ce067a0e331bc953c7130be4e41c8
neuroimaging/externals/pynifti/nifti/__init__.py
neuroimaging/externals/pynifti/nifti/__init__.py
from niftiimage import NiftiImage
""" Nifti ===== Python bindings for the nifticlibs. Access through the NiftiImage class. See help for pyniftiio.nifti.NiftiImage """ from niftiimage import NiftiImage
Add doc for pynifti package.
DOC: Add doc for pynifti package.
Python
bsd-3-clause
yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD
--- +++ @@ -1,2 +1,11 @@ +""" +Nifti +===== + + Python bindings for the nifticlibs. Access through the NiftiImage class. + + See help for pyniftiio.nifti.NiftiImage + +""" from niftiimage import NiftiImage
2864441be365beb40e0396b444f8d96af8d7d92e
aleph/logic/documents.py
aleph/logic/documents.py
import os
import logging

from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


def crawl_directory(collection, path, parent=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
    ingest_entity(collection, document.to_proxy())
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
            crawl_directory(collection, child, document)
import os
import logging
from servicelayer.jobs import Job

from aleph.core import db, archive
from aleph.model import Document
from aleph.queues import ingest_entity

log = logging.getLogger(__name__)


def crawl_directory(collection, path, parent=None, job_id=None):
    """Crawl the contents of the given path."""
    content_hash = None
    if not path.is_dir():
        content_hash = archive.archive_file(path)
    foreign_id = path.name
    if parent is not None:
        foreign_id = os.path.join(parent.foreign_id, foreign_id)
    meta = {'file_name': path.name}
    document = Document.save(collection,
                             parent=parent,
                             foreign_id=foreign_id,
                             content_hash=content_hash,
                             meta=meta)
    db.session.commit()
    job_id = job_id or Job.random_id()
    ingest_entity(collection, document.to_proxy(), job_id=job_id)
    log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id)
    if path.is_dir():
        for child in path.iterdir():
            crawl_directory(collection, child, document, job_id)
Make stable job IDs in ingest runs
Make stable job IDs in ingest runs
Python
mit
alephdata/aleph,pudo/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph,alephdata/aleph,alephdata/aleph,pudo/aleph
--- +++ @@ -1,5 +1,6 @@ import os import logging +from servicelayer.jobs import Job from aleph.core import db, archive from aleph.model import Document @@ -8,7 +9,7 @@ log = logging.getLogger(__name__) -def crawl_directory(collection, path, parent=None): +def crawl_directory(collection, path, parent=None, job_id=None): """Crawl the contents of the given path.""" content_hash = None if not path.is_dir(): @@ -23,8 +24,9 @@ content_hash=content_hash, meta=meta) db.session.commit() - ingest_entity(collection, document.to_proxy()) + job_id = job_id or Job.random_id() + ingest_entity(collection, document.to_proxy(), job_id=job_id) log.info("Crawl [%s]: %s -> %s", collection.id, path, document.id) if path.is_dir(): for child in path.iterdir(): - crawl_directory(collection, child, document) + crawl_directory(collection, child, document, job_id)
ae2f1014bbe83d64f17fee6a9ebd2c12cdc9a1bf
app/main/errors.py
app/main/errors.py
from flask import render_template
from app.main import main


@main.app_errorhandler(400)
def bad_request(e):
    return render_template("errors/500.html",
                           **main.config['BASE_TEMPLATE_DATA']), 400


@main.app_errorhandler(404)
def page_not_found(e):
    return render_template("errors/404.html",
                           **main.config['BASE_TEMPLATE_DATA']), 404


@main.app_errorhandler(500)
def exception(e):
    return render_template("errors/500.html",
                           **main.config['BASE_TEMPLATE_DATA']), 500


@main.app_errorhandler(503)
def service_unavailable(e):
    return render_template("errors/500.html",
                           **main.config['BASE_TEMPLATE_DATA']), 503
from flask import render_template
from app.main import main
from dmutils.apiclient import APIError


@main.app_errorhandler(APIError)
def api_error(e):
    return _render_error_template(e.status_code)


@main.app_errorhandler(400)
def bad_request(e):
    return _render_error_template(400)


@main.app_errorhandler(404)
def page_not_found(e):
    return _render_error_template(404)


@main.app_errorhandler(500)
def exception(e):
    return _render_error_template(500)


@main.app_errorhandler(503)
def service_unavailable(e):
    return _render_error_template(503)


def _render_error_template(status_code):
    return render_template(
        _get_template(status_code),
        **main.config['BASE_TEMPLATE_DATA']
    ), status_code


def _get_template(status_code):
    if status_code == 404:
        return "errors/404.html"
    else:
        return "errors/500.html"
Add APIError flask error handler
Add APIError flask error handler This is modelled after the similar change in the supplier frontend https://github.com/alphagov/digitalmarketplace-supplier-frontend/commit/233f8840d55cadb9fb7fe60ff12c53b0f59f23a5
Python
mit
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
--- +++ @@ -1,26 +1,42 @@ from flask import render_template from app.main import main +from dmutils.apiclient import APIError + + +@main.app_errorhandler(APIError) +def api_error(e): + return _render_error_template(e.status_code) @main.app_errorhandler(400) def bad_request(e): - return render_template("errors/500.html", - **main.config['BASE_TEMPLATE_DATA']), 400 + return _render_error_template(400) @main.app_errorhandler(404) def page_not_found(e): - return render_template("errors/404.html", - **main.config['BASE_TEMPLATE_DATA']), 404 + return _render_error_template(404) @main.app_errorhandler(500) def exception(e): - return render_template("errors/500.html", - **main.config['BASE_TEMPLATE_DATA']), 500 + return _render_error_template(500) @main.app_errorhandler(503) def service_unavailable(e): - return render_template("errors/500.html", - **main.config['BASE_TEMPLATE_DATA']), 503 + return _render_error_template(503) + + +def _render_error_template(status_code): + return render_template( + _get_template(status_code), + **main.config['BASE_TEMPLATE_DATA'] + ), status_code + + +def _get_template(status_code): + if status_code == 404: + return "errors/404.html" + else: + return "errors/500.html"
3cf44a081f6dc824e1ff0639f424c0502fa6fe39
pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py
pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py
# -*- coding: utf-8 -*-

import pytest


def pytest_addoption(parser):
    group = parser.getgroup('{{cookiecutter.plugin_name}}')
    group.addoption(
        '--foo',
        action='store',
        dest='foo',
        help='Set the value for the fixture "bar".'
    )


@pytest.fixture
def bar(request):
    return request.config.option.foo
# -*- coding: utf-8 -*-

import pytest


def pytest_addoption(parser):
    group = parser.getgroup('{{cookiecutter.plugin_name}}')
    group.addoption(
        '--foo',
        action='store',
        dest='foo',
        default={{cookiecutter.year}},
        help='Set the value for the fixture "bar".'
    )


@pytest.fixture
def bar(request):
    return request.config.option.foo
Add a default to the foo option
Add a default to the foo option
Python
mit
s0undt3ch/cookiecutter-pytest-plugin,pytest-dev/cookiecutter-pytest-plugin,luzfcb/cookiecutter-pytest-plugin
--- +++ @@ -9,6 +9,7 @@ '--foo', action='store', dest='foo', + default={{cookiecutter.year}}, help='Set the value for the fixture "bar".' )
095d8d0136ff3942a9fcc76564a61e17dae56b71
goldprice.py
goldprice.py
#!/usr/bin/python

# Maybank Gold Investment Account price scraper
# Using BeautifulSoup package
# Developed and tested on Debian Testing (Jessie)
# Initial development 25 July 2012

# Copyright (C) 2012,2013 Sharuzzaman Ahmat Raslan (sharuzzaman@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import urllib2
from BeautifulSoup import BeautifulSoup
import datetime

website=urllib2.urlopen('http://www.maybank2u.com.my/mbbfrx/gold_rate.htm')
data=website.read()
soup = BeautifulSoup(data)

date=soup('td')[31].string
selling=soup('td')[32].string
buying=soup('td')[33].string

print "%s,%s,%s" % (date,selling,buying)
#!/usr/bin/python

# Maybank Gold Investment Account price scraper
# Using BeautifulSoup package
# Developed and tested on Debian Testing (Jessie)
# Initial development 25 July 2012

# Copyright (C) 2012,2013 Sharuzzaman Ahmat Raslan (sharuzzaman@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import urllib2
from BeautifulSoup import BeautifulSoup
import datetime

#maybank website looking for user-agent header
req = urllib2.Request('http://www.maybank2u.com.my/mbbfrx/gold_rate.htm')
req.add_header('User-Agent', 'Mozilla')
website=urllib2.urlopen(req)
data=website.read()
soup = BeautifulSoup(data)

date=soup('td')[31].string
selling=soup('td')[32].string
buying=soup('td')[33].string

print "%s,%s,%s" % (date,selling,buying)
Fix breakage. The website is looking for user-agent header
Fix breakage. The website is looking for user-agent header
Python
agpl-3.0
sharuzzaman/sharuzzaman-code-repo.maybank-gia-rate
--- +++ @@ -24,7 +24,10 @@ from BeautifulSoup import BeautifulSoup import datetime -website=urllib2.urlopen('http://www.maybank2u.com.my/mbbfrx/gold_rate.htm') +#maybank website looking for user-agent header +req = urllib2.Request('http://www.maybank2u.com.my/mbbfrx/gold_rate.htm') +req.add_header('User-Agent', 'Mozilla') +website=urllib2.urlopen(req) data=website.read() soup = BeautifulSoup(data)