Dataset schema (one row per commit; column name, dtype, and observed value range):

column            dtype           values
commit            stringlengths   40–40
old_file          stringlengths   4–118
new_file          stringlengths   4–118
old_contents      stringlengths   0–2.94k
new_contents      stringlengths   1–4.43k
subject           stringlengths   15–444
message           stringlengths   16–3.45k
lang              stringclasses   1 value
license           stringclasses   13 values
repos             stringlengths   5–43.2k
prompt            stringlengths   17–4.58k
response          stringlengths   1–4.43k
prompt_tagged     stringlengths   58–4.62k
response_tagged   stringlengths   1–4.43k
text              stringlengths   132–7.29k
text_tagged       stringlengths   173–7.33k

The last six columns are derived from the base fields: prompt is old_contents followed by message, response equals new_contents, and text is old_contents, message, and new_contents concatenated. The _tagged variants wrap the same pieces in markers, in the form <commit_before>{old_contents}<commit_msg>{message}<commit_after>{new_contents}. Because the derived columns repeat the base fields verbatim, each record below is listed by its base fields.

commit: 5cef7a34be1940e53676404b560207bfcbe10a63
old_file: setup.py
new_file: setup.py
old_contents:
#!/usr/bin/python

from setuptools import setup
from distutils.extension import Extension
from Pyrex.Distutils import build_ext

setup(
    name="PyMoira",
    version="3.0.1",
    description="PyMoira - Python bindings for the Athena Moira library",
    author="Evan Broder",
    author_email="broder@mit.edu",
    license="MIT",
    py_modules=['moira'],
    ext_modules=[
        Extension("_moira", ["_moira.pyx"],
                  libraries=["moira", "krb5", "krb4", "hesiod"])
    ],
    cmdclass={"build_ext": build_ext}
)

new_contents:
#!/usr/bin/python

from setuptools import setup
from distutils.extension import Extension
from Pyrex.Distutils import build_ext

setup(
    name="PyMoira",
    version="4.0",
    description="PyMoira - Python bindings for the Athena Moira library",
    author="Evan Broder",
    author_email="broder@mit.edu",
    license="MIT",
    py_modules=['moira'],
    ext_modules=[
        Extension("_moira", ["_moira.pyx"],
                  libraries=["moira", "krb5", "krb4", "hesiod"])
    ],
    cmdclass={"build_ext": build_ext}
)

subject: Bump version number to 4.0 to reflect major improvements.
message: Bump version number to 4.0 to reflect major improvements.
lang: Python
license: mit
repos: ebroder/python-moira
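
The tagged columns can be reassembled from a record's base fields, as in the record above. A minimal sketch, assuming plain concatenation with no separators; the exact whitespace between fields is not recoverable from this dump:

# Rebuild prompt_tagged and text_tagged from a record's base fields.
# Plain concatenation is an assumption about the original pipeline.
def build_tagged(old_contents, message, new_contents):
    prompt_tagged = ("<commit_before>" + old_contents +
                     "<commit_msg>" + message + "<commit_after>")
    text_tagged = prompt_tagged + new_contents
    return prompt_tagged, text_tagged

# Tiny illustrative inputs, not real dataset values.
p, t = build_tagged('version="3.0.1"', "Bump version number to 4.0", 'version="4.0"')
print(t)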

commit: 153f0e18df03d7ff7b691d2bd454468933ef50b0
old_file: setup.py
new_file: setup.py
old_contents:
import os
from setuptools import setup

long_description = 'Please see our GitHub README'
if os.path.exists('README.txt'):
    long_description = open('README.txt').read()

base_url = 'https://github.com/sendgrid/'
version = '3.1.0'
setup(
    name='python_http_client',
    version=version,
    author='Elmer Thomas',
    author_email='dx@sendgrid.com',
    url='{}python-http-client'.format(base_url),
    download_url='{}python-http-client/tarball/{}'.format(base_url, version),
    packages=['python_http_client'],
    license='MIT',
    description='HTTP REST client, simplified for Python',
    long_description=long_description,
    keywords=['REST', 'HTTP', 'API'],
    classifiers=[
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6'
    ]
)

new_contents:
import os
from setuptools import setup

long_description = 'Please see our GitHub README'
if os.path.exists('README.txt'):
    long_description = open('README.txt').read()

base_url = 'https://github.com/sendgrid/'
version = '3.1.0'
setup(
    name='python_http_client',
    version=version,
    author='Elmer Thomas',
    author_email='dx@sendgrid.com',
    url='{}python-http-client'.format(base_url),
    download_url='{}python-http-client/tarball/{}'.format(base_url, version),
    packages=['python_http_client'],
    license='MIT',
    description='HTTP REST client, simplified for Python',
    long_description=long_description,
    keywords=['REST', 'HTTP', 'API'],
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
    classifiers=[
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6'
    ]
)

subject: Add python_requires to help pip
message: Add python_requires to help pip
lang: Python
license: mit
repos: sendgrid/python-http-client,sendgrid/python-http-client

commit: 0feca08806c280e0c8ed5f6b26dda36d01f5467d
old_file: linter.py
new_file: linter.py
old_contents:
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Patrick Kish
# Copyright (c) 2014 Patrick Kish
#
# License: MIT
#

"""This module exports the LuaGlobals plugin class."""

from os.path import dirname, realpath, join as pathjoin

FOLDER_PATH = dirname(realpath(__file__))

from SublimeLinter.lint import Linter, util


class LuaGlobals(Linter):

    """Provides an interface to lua-globals."""

    syntax = 'lua'
    script_path = pathjoin(FOLDER_PATH,'findglobals.lua')
    cmd = 'lua "' + script_path + '" "@"'
    regex = (
        r'\s*\[(?P<line>\d+)\]\s+'
        r'((?P<warning>G:)|(?P<error>S:))'
        r'(?P<message>.+?(?::\s(?P<near>.*)|$))'
    )
    tempfile_suffix = "lua"

new_contents:
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Patrick Kish
# Copyright (c) 2014 Patrick Kish
#
# License: MIT
#

"""This module exports the LuaGlobals plugin class."""

from os.path import dirname, realpath, join as pathjoin

FOLDER_PATH = dirname(realpath(__file__))

from SublimeLinter.lint import Linter


class LuaGlobals(Linter):

    """Provides an interface to lua-globals."""

    syntax = 'lua'
    script_path = pathjoin(FOLDER_PATH, 'findglobals.lua')
    cmd = 'lua "' + script_path + '" "@"'
    regex = (
        r'\s*\[(?P<line>\d+)\]\s+'
        r'((?P<warning>G:)|(?P<error>S:))'
        r'(?P<message>.+?(?::\s(?P<near>.*)|$))'
    )
    tempfile_suffix = "lua"

subject: Remove unused util ref and add a space after , for Travis
message: Remove unused util ref and add a space after , for Travis
lang: Python
license: mit
repos: Sinaloit/SublimeLinter-contrib-lua-globals

commit: 9fe3e814e2a74e38fe960fab333b09cf8f00e1b0
old_file: setup.py
new_file: setup.py
old_contents:
import io
from os import path

from setuptools import setup

this_directory = path.abspath(path.dirname(__file__))
with io.open(path.join(this_directory, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

about = {}
with io.open("striprtf/_version.py", "r", encoding="utf-8") as f:
    exec(f.read(), about)

setup(
    name="striprtf",
    packages=["striprtf"],
    version=about["__version__"],
    description="A simple library to convert rtf to text",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Joshy Cyriac",
    author_email="joshy@posteo.ch",
    url="https://github.com/joshy/striprtf",
    download_url="https://github.com/joshy/striprtf/archive/v%s.tar.gz" % about["__version__"],
    keywords=["rtf"],
    scripts=["striprtf/striprtf"],
    classifiers=[],
)

new_contents:
import io
from os import path

from setuptools import setup

this_directory = path.abspath(path.dirname(__file__))
with io.open(path.join(this_directory, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

about = {}
with io.open("striprtf/_version.py", "r", encoding="utf-8") as f:
    exec(f.read(), about)

setup(
    name="striprtf",
    packages=["striprtf"],
    version=about["__version__"],
    description="A simple library to convert rtf to text",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Joshy Cyriac",
    author_email="joshy@posteo.ch",
    url="https://github.com/joshy/striprtf",
    download_url="https://github.com/joshy/striprtf/archive/v%s.tar.gz" % about["__version__"],
    keywords=["rtf"],
    scripts=["striprtf/striprtf"],
    classifiers=[
        "License :: OSI Approved :: BSD License"
    ],
)

subject: Add License classifier for pypa
message: Add License classifier for pypa
lang: Python
license: bsd-3-clause
repos: joshy/striprtf

commit: 396128055f65bdff731b3d70e4a47354e63c6d67
old_file: setup.py
new_file: setup.py
old_contents:
import os
from setuptools import setup

setup(
    name='algorithmia',
    version='1.1.4',
    description='Algorithmia Python Client',
    long_description='Algorithmia Python Client is a client library for accessing Algorithmia from python code. This library also gets bundled with any Python algorithms in Algorithmia.',
    url='http://github.com/algorithmiaio/algorithmia-python',
    license='MIT',
    author='Algorithmia',
    author_email='support@algorithmia.com',
    packages=['Algorithmia'],
    install_requires=[
        'requests',
        'six',
        'enum34',
        'algorithmia-api-client==1.0.0'
    ],
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)

new_contents:
import os
from setuptools import setup

setup(
    name='algorithmia',
    version='1.1.4',
    description='Algorithmia Python Client',
    long_description='Algorithmia Python Client is a client library for accessing Algorithmia from python code. This library also gets bundled with any Python algorithms in Algorithmia.',
    url='http://github.com/algorithmiaio/algorithmia-python',
    license='MIT',
    author='Algorithmia',
    author_email='support@algorithmia.com',
    packages=['Algorithmia'],
    install_requires=[
        'requests',
        'six',
        'enum34',
        'algorithmia-api-client>=1.0.0,<2.0'
    ],
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)

subject: Change algorithmia-api-client dependency from a version to a version range
message: Change algorithmia-api-client dependency from a version to a version range
lang: Python
license: mit
repos: algorithmiaio/algorithmia-python

commit: 0523c622f5012d161be197ee856e9ab767ea4101
old_file: setup.py
new_file: setup.py
old_contents:
#!/usr/bin/env python
import os

from setuptools import setup, find_packages

long_description = open(
    os.path.join(
        os.path.dirname(__file__),
        'README.rst'
    )
).read()

setup(
    name='pymux',
    author='Jonathan Slenders',
    version='0.3',
    license='LICENSE',
    url='https://github.com/jonathanslenders/',
    description='Pure Python terminal multiplexer.',
    long_description=long_description,
    packages=find_packages('.'),
    install_requires=[
        'prompt_toolkit==0.56',
        'pyte>=0.4.10',
        'six>=1.9.0',
        'docopt>=0.6.2',
    ],
    entry_points={
        'console_scripts': [
            'pymux = pymux.entry_points.run_pymux:run',
        ]
    },
)

new_contents:
#!/usr/bin/env python
import os

from setuptools import setup, find_packages

long_description = open(
    os.path.join(
        os.path.dirname(__file__),
        'README.rst'
    )
).read()

setup(
    name='pymux',
    author='Jonathan Slenders',
    version='0.3',
    license='LICENSE',
    url='https://github.com/jonathanslenders/',
    description='Pure Python terminal multiplexer.',
    long_description=long_description,
    packages=find_packages('.'),
    install_requires=[
        'prompt_toolkit==0.57',
        'pyte>=0.4.10',
        'six>=1.9.0',
        'docopt>=0.6.2',
    ],
    entry_points={
        'console_scripts': [
            'pymux = pymux.entry_points.run_pymux:run',
        ]
    },
)

subject: Upgrade to prompt_toolkit 0.57. (Should give much better performance.)
message: Upgrade to prompt_toolkit 0.57. (Should give much better performance.)
lang: Python
license: bsd-3-clause
repos: ABaldwinHunter/pymux-clone,jonathanslenders/pymux

commit: dd7e4fa5e1e92119567a4fef81db36aec2fe378b
old_file: setup.py
new_file: setup.py
old_contents:
#!/usr/bin/env python
from distutils.core import setup

setup(
    name='enocean',
    version='0.2',
    description='EnOcean serial protocol implementation',
    author='Kimmo Huoman',
    author_email='kipenroskaposti@gmail.com',
    url='https://github.com/kipe/enocean',
    packages=[
        'enocean',
        'enocean.protocol',
        'enocean.communicators',
    ],
    scripts=[
        'examples/enocean_example.py',
    ],
    package_data={
        '': ['EEP_2.6.1.xml']
    },
    install_requires=[
        'enum34>=1.0',
        'pyserial>=2.7',
        'beautifulsoup4>=4.3.2',
    ])

new_contents:
#!/usr/bin/env python
from distutils.core import setup

setup(
    name='enocean',
    version='0.20',
    description='EnOcean serial protocol implementation',
    author='Kimmo Huoman',
    author_email='kipenroskaposti@gmail.com',
    url='https://github.com/kipe/enocean',
    packages=[
        'enocean',
        'enocean.protocol',
        'enocean.communicators',
    ],
    scripts=[
        'examples/enocean_example.py',
    ],
    package_data={
        '': ['EEP_2.6.1.xml']
    },
    install_requires=[
        'enum34>=1.0',
        'pyserial>=2.7',
        'beautifulsoup4>=4.3.2',
    ])

subject: Update version number to 0.20, because 0.2 < 0.15
message: Update version number to 0.20, because 0.2 < 0.15
lang: Python
license: mit
repos: Ethal/enocean,Ethal/enocean,kipe/enocean,kipe/enocean

commit: 05c057b44460eea6f6fe4a3dd891038d65e6d781
old_file: naxos/naxos/settings/secretKeyGen.py
new_file: naxos/naxos/settings/secretKeyGen.py
old_contents:
"""
Two things are wrong with Django's default `SECRET_KEY` system:

1. It is not random but pseudo-random
2. It saves and displays the SECRET_KEY in `settings.py`

This snippet
1. uses `SystemRandom()` instead to generate a random key
2. saves a local `secret.txt`

The result is a random and safely hidden `SECRET_KEY`.

From https://gist.github.com/ndarville/3452907
Edited by Thomas Maurin, 2014 to run on python3
"""

try:
    SECRET_KEY
except NameError:
    from os.path import join
    from .base import BASE_DIR
    SECRET_FILE = join(BASE_DIR, 'secret.txt')
    try:
        SECRET_KEY = open(SECRET_FILE).read().strip()
    except IOError:
        try:
            import random
            SECRET_KEY = ''.join([random.SystemRandom().choice(
                'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
                for i in range(50)])
            secret = open(SECRET_FILE, 'w')
            secret.write(SECRET_KEY)
            secret.close()
        except IOError:
            Exception('Please create a {:s} file with random characters \
to generate your secret key!'.format(SECRET_FILE))

new_contents:
"""
Two things are wrong with Django's default `SECRET_KEY` system:

1. It is not random but pseudo-random
2. It saves and displays the SECRET_KEY in `settings.py`

This snippet
1. uses `SystemRandom()` instead to generate a random key
2. saves a local `secret.txt`

The result is a random and safely hidden `SECRET_KEY`.

From https://gist.github.com/ndarville/3452907
Edited by Thomas Maurin, 2014 to run on python3
"""

try:
    SECRET_KEY
except NameError:
    from os.path import join
    from .base import BASE_DIR
    SECRET_FILE = join(BASE_DIR, 'secret.txt')
    try:
        with open(SECRET_FILE) as f:
            SECRET_KEY = f.read().strip()
    except IOError:
        try:
            import random
            SECRET_KEY = ''.join([random.SystemRandom().choice(
                'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
                for i in range(50)])
            secret = open(SECRET_FILE, 'w')
            secret.write(SECRET_KEY)
            secret.close()
        except IOError:
            Exception('Please create a {:s} file with random characters \
to generate your secret key!'.format(SECRET_FILE))

subject: Fix not closed file warning
message: fix: Fix not closed file warning
lang: Python
license: apache-2.0
repos: maur1th/naxos,maur1th/naxos,maur1th/naxos,maur1th/naxos
""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: SECRET_KEY = open(SECRET_FILE).read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE)) fix: Fix not closed file warning
""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: with open(SECRET_FILE) as f: SECRET_KEY = f.read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE))
<commit_before>""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: SECRET_KEY = open(SECRET_FILE).read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE)) <commit_msg>fix: Fix not closed file warning<commit_after>
""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: with open(SECRET_FILE) as f: SECRET_KEY = f.read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE))
""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: SECRET_KEY = open(SECRET_FILE).read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE)) fix: Fix not closed file warning""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: with open(SECRET_FILE) as f: SECRET_KEY = f.read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE))
<commit_before>""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: SECRET_KEY = open(SECRET_FILE).read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE)) <commit_msg>fix: Fix not closed file warning<commit_after>""" Two things are wrong with Django's default `SECRET_KEY` system: 1. It is not random but pseudo-random 2. It saves and displays the SECRET_KEY in `settings.py` This snippet 1. uses `SystemRandom()` instead to generate a random key 2. saves a local `secret.txt` The result is a random and safely hidden `SECRET_KEY`. From https://gist.github.com/ndarville/3452907 Edited by Thomas Maurin, 2014 to run on python3 """ try: SECRET_KEY except NameError: from os.path import join from .base import BASE_DIR SECRET_FILE = join(BASE_DIR, 'secret.txt') try: with open(SECRET_FILE) as f: SECRET_KEY = f.read().strip() except IOError: try: import random SECRET_KEY = ''.join([random.SystemRandom().choice( 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) secret = open(SECRET_FILE, 'w') secret.write(SECRET_KEY) secret.close() except IOError: Exception('Please create a {:s} file with random characters \ to generate your secret key!'.format(SECRET_FILE))
9d6eb0b5f5d6c6ebdb17a0e1fe8f468acb8fd7bd
setup.py
setup.py
#!/usr/bin/env python
import os
from setuptools import setup


def read(fname):
    with open(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                           fname), 'r') as infile:
        content = infile.read()
    return content


setup(
    name='marshmallow-polyfield',
    version=2.7,
    description='An unofficial extension to Marshmallow to allow for polymorphic fields',
    long_description=read('README.rst'),
    author='Matt Bachmann',
    author_email='bachmann.matt@gmail.com',
    url='https://github.com/Bachmann1234/marshmallow-polyfield',
    packages=['marshmallow_polyfield', 'tests'],
    license=read('LICENSE'),
    keywords=('serialization', 'rest', 'json', 'api', 'marshal',
              'marshalling', 'deserialization', 'validation', 'schema'),
    install_requires=['marshmallow>=2.0.0'],
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
)

#!/usr/bin/env python
import os
from setuptools import setup


def read(fname):
    with open(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                           fname), 'r') as infile:
        content = infile.read()
    return content


setup(
    name='marshmallow-polyfield',
    version=3.0,
    description='An unofficial extension to Marshmallow to allow for polymorphic fields',
    long_description=read('README.rst'),
    author='Matt Bachmann',
    author_email='bachmann.matt@gmail.com',
    url='https://github.com/Bachmann1234/marshmallow-polyfield',
    packages=['marshmallow_polyfield', 'tests'],
    license=read('LICENSE'),
    keywords=('serialization', 'rest', 'json', 'api', 'marshal',
              'marshalling', 'deserialization', 'validation', 'schema'),
    install_requires=['marshmallow>=2.0.0'],
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
)
Increment version. As the API has changed, this is a major version number update.
Increment version. As the API has changed, this is a major version number update.
Python
apache-2.0
Bachmann1234/marshmallow-polyfield
#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=2.7, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], ) Increment version. As the api has changed Major version number update
#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=3.0, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], )
<commit_before>#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=2.7, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], ) <commit_msg>Increment version. As the api has changed Major version number update<commit_after>
#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=3.0, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], )
#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=2.7, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], ) Increment version. As the api has changed Major version number update#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=3.0, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], )
<commit_before>#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=2.7, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], ) <commit_msg>Increment version. As the api has changed Major version number update<commit_after>#!/usr/bin/env python import os from setuptools import setup def read(fname): with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile: content = infile.read() return content setup( name='marshmallow-polyfield', version=3.0, description='An unofficial extension to Marshmallow to allow for polymorphic fields', long_description=read('README.rst'), author='Matt Bachmann', author_email='bachmann.matt@gmail.com', url='https://github.com/Bachmann1234/marshmallow-polyfield', packages=['marshmallow_polyfield', 'tests'], license=read('LICENSE'), keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema'), install_requires=['marshmallow>=2.0.0'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], )
1931ef378f0abc32181150be1ca31110c0e98459
setup.py
setup.py
#!/usr/bin/env python3
import os
from setuptools import setup, find_packages
from vtimshow import defaults


def read(fname):
    """
    Utility function to read a file.
    """
    return open(fname, "r").read().strip()


setup(
    name = "vtimshow",
    version = read(os.path.join(os.path.dirname(__file__), "VERSION")),
    packages = find_packages(),
    description = defaults.COMMENT,
    long_description = read(
        os.path.join(os.path.dirname(__file__), "README.txt")
    ),
    author = defaults.AUTHOR,
    author_email = defaults.AUTHOR_EMAIL,
    license = defaults.LICENSE,
    install_requires = ["ViTables >2.1",],
    dependency_links = [
        "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version"
    ],
    entry_points = {
        "vitables.plugins" : "image_viewer = vtimshow:VtImageViewer"
    }
)

#!/usr/bin/env python3
import os
from setuptools import setup, find_packages
from vtimshow import defaults


def read(fname):
    """
    Utility function to read a file.
    """
    return open(fname, "r").read().strip()


setup(
    name = "vtimshow",
    version = read(os.path.join(os.path.dirname(__file__), "VERSION")),
    packages = find_packages(),
    description = defaults.COMMENT,
    long_description = read(
        os.path.join(os.path.dirname(__file__), "README.txt")
    ),
    author = defaults.AUTHOR,
    author_email = defaults.AUTHOR_EMAIL,
    license = defaults.LICENSE,
    install_requires = ["ViTables >2.1", "pyqtgraph"],
    dependency_links = [
        "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version"
    ],
    entry_points = {
        "vitables.plugins" : defaults.UID +" = vtimshow:VtImageViewer"
    }
)
Add dependency and correct name
Add dependency and correct name
Python
mit
kprussing/vtimshow
#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1",], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : "image_viewer = vtimshow:VtImageViewer" } ) Add dependency and correct name
#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1", "pyqtgraph"], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : defaults.UID +" = vtimshow:VtImageViewer" } )
<commit_before>#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1",], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : "image_viewer = vtimshow:VtImageViewer" } ) <commit_msg>Add dependency and correct name<commit_after>
#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1", "pyqtgraph"], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : defaults.UID +" = vtimshow:VtImageViewer" } )
#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1",], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : "image_viewer = vtimshow:VtImageViewer" } ) Add dependency and correct name#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1", "pyqtgraph"], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : defaults.UID +" = vtimshow:VtImageViewer" } )
<commit_before>#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1",], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : "image_viewer = vtimshow:VtImageViewer" } ) <commit_msg>Add dependency and correct name<commit_after>#!/usr/bin/env python3 import os from setuptools import setup, find_packages from vtimshow import defaults def read(fname): """ Utility function to read a file. """ return open(fname, "r").read().strip() setup( name = "vtimshow", version = read(os.path.join(os.path.dirname(__file__), "VERSION")), packages = find_packages(), description = defaults.COMMENT, long_description = read( os.path.join(os.path.dirname(__file__), "README.txt") ), author = defaults.AUTHOR, author_email = defaults.AUTHOR_EMAIL, license = defaults.LICENSE, install_requires = ["ViTables >2.1", "pyqtgraph"], dependency_links = [ "https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version" ], entry_points = { "vitables.plugins" : defaults.UID +" = vtimshow:VtImageViewer" } )
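Context for the entry_points change in the vtimshow record above: plugins registered under a group such as "vitables.plugins" are looked up by the host application at runtime. A rough sketch of the consuming side; the use of setuptools' pkg_resources is an assumption, and the real ViTables loader may differ:

# Sketch: discovering whatever is installed under the "vitables.plugins"
# entry-point group. Output depends on the packages installed locally.
import pkg_resources

for entry_point in pkg_resources.iter_entry_points("vitables.plugins"):
    plugin_class = entry_point.load()   # e.g. vtimshow:VtImageViewer
    print(entry_point.name, "->", plugin_class)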
52f488eb54ccd250d1ba2c67ae0d9ebb4c101ec8
setup.py
setup.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages

setup(
    name='geodjango-timezones',
    version='0.1',
    description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.',
    author='Adam Fast',
    author_email='',
    url='https://github.com/adamfast/geodjango_timezones',
    packages=find_packages(),
    package_data={
    },
    include_package_data=True,
    install_requires=['pytz'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Utilities'
    ],
)

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages

setup(
    name='geodjango-timezones',
    version='1.0',
    description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.',
    author='Adam Fast',
    author_email='',
    url='https://github.com/adamfast/geodjango_timezones',
    packages=find_packages(),
    package_data={
    },
    include_package_data=True,
    install_requires=['pytz'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Utilities'
    ],
)
Increment version - go ahead and call it 1.0, even.
Increment version - go ahead and call it 1.0, even.
Python
bsd-3-clause
adamfast/geodjango_timezones,adamfast/geodjango_timezones
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='0.1', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], ) Increment version - go ahead and call it 1.0, even.
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='1.0', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='0.1', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], ) <commit_msg>Increment version - go ahead and call it 1.0, even.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='1.0', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='0.1', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], ) Increment version - go ahead and call it 1.0, even.#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='1.0', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='0.1', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], ) <commit_msg>Increment version - go ahead and call it 1.0, even.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name='geodjango-timezones', version='1.0', description='Models to store and scripts to load timezone shapefiles to be usable inside a GeoDjango application.', author='Adam Fast', author_email='', url='https://github.com/adamfast/geodjango_timezones', packages=find_packages(), package_data={ }, include_package_data=True, install_requires=['pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities' ], )
09a18ba1b2ac5517b37b524fb2c7d7e2917ed251
words.py
words.py
""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result)
""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result) if __name__ == "__main__": choose((3, 8))
Add a main guard for testing
Add a main guard for testing
Python
mit
tml/python-hangman-2017-summer
""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result)Add a main guard for testing
""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result) if __name__ == "__main__": choose((3, 8))
<commit_before>""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result)<commit_msg>Add a main guard for testing<commit_after>
""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result) if __name__ == "__main__": choose((3, 8))
""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result)Add a main guard for testing""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result) if __name__ == "__main__": choose((3, 8))
<commit_before>""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result)<commit_msg>Add a main guard for testing<commit_after>""" Pick a word from /usr/share/dict/words """ import subprocess from sys import exit import random def choose(difficulty): (min, max) = difficulty cmd = "/usr/bin/grep -E '^.{{{},{}}}$' /usr/share/dict/words".format(min, max) obj = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) result = obj.stdout.decode('utf-8').strip().split("\n") return random.choice(result) if __name__ == "__main__": choose((3, 8))
0af3b589c6c271d07ad4e204fa41aa0fed167a94
thinglang/parser/constructs/cast_operation.py
thinglang/parser/constructs/cast_operation.py
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.values.access import Access
from thinglang.parser.values.method_call import MethodCall


class CastOperation(object):
    """
    Explicitly cast from one type to another
    Expects a conversion method on the source class
    """

    @staticmethod
    def create(source: Identifier, destination: Identifier) -> MethodCall:
        return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS)

from thinglang.lexer.operators.casts import LexicalCast
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
from thinglang.parser.values.access import Access
from thinglang.parser.values.method_call import MethodCall
from thinglang.utils.type_descriptors import ValueType


class CastOperation(BaseNode):
    """
    Explicitly cast from one type to another
    Expects a conversion method on the source class
    """

    @staticmethod
    def create(source: Identifier, destination: Identifier) -> MethodCall:
        return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS)

    @staticmethod
    @ParserRule.mark
    def parse_inline_cast_op(value: ValueType, _: LexicalCast, target_type: Identifier):
        return MethodCall(Access([value, Identifier('convert_') + target_type]), [])
Add explicit parsing rule for cast operations
Add explicit parsing rule for cast operations
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
from thinglang.lexer.values.identifier import Identifier from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall class CastOperation(object): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) Add explicit parsing rule for cast operations
from thinglang.lexer.operators.casts import LexicalCast from thinglang.lexer.values.identifier import Identifier from thinglang.parser.nodes import BaseNode from thinglang.parser.rule import ParserRule from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall from thinglang.utils.type_descriptors import ValueType class CastOperation(BaseNode): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) @staticmethod @ParserRule.mark def parse_inline_cast_op(value: ValueType, _: LexicalCast, target_type: Identifier): return MethodCall(Access([value, Identifier('convert_') + target_type]), [])
<commit_before>from thinglang.lexer.values.identifier import Identifier from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall class CastOperation(object): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) <commit_msg>Add explicit parsing rule for cast operations<commit_after>
from thinglang.lexer.operators.casts import LexicalCast from thinglang.lexer.values.identifier import Identifier from thinglang.parser.nodes import BaseNode from thinglang.parser.rule import ParserRule from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall from thinglang.utils.type_descriptors import ValueType class CastOperation(BaseNode): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) @staticmethod @ParserRule.mark def parse_inline_cast_op(value: ValueType, _: LexicalCast, target_type: Identifier): return MethodCall(Access([value, Identifier('convert_') + target_type]), [])
from thinglang.lexer.values.identifier import Identifier from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall class CastOperation(object): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) Add explicit parsing rule for cast operationsfrom thinglang.lexer.operators.casts import LexicalCast from thinglang.lexer.values.identifier import Identifier from thinglang.parser.nodes import BaseNode from thinglang.parser.rule import ParserRule from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall from thinglang.utils.type_descriptors import ValueType class CastOperation(BaseNode): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) @staticmethod @ParserRule.mark def parse_inline_cast_op(value: ValueType, _: LexicalCast, target_type: Identifier): return MethodCall(Access([value, Identifier('convert_') + target_type]), [])
<commit_before>from thinglang.lexer.values.identifier import Identifier from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall class CastOperation(object): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) <commit_msg>Add explicit parsing rule for cast operations<commit_after>from thinglang.lexer.operators.casts import LexicalCast from thinglang.lexer.values.identifier import Identifier from thinglang.parser.nodes import BaseNode from thinglang.parser.rule import ParserRule from thinglang.parser.values.access import Access from thinglang.parser.values.method_call import MethodCall from thinglang.utils.type_descriptors import ValueType class CastOperation(BaseNode): """ Explicitly cast from one type to another Expects a conversion method on the source class """ @staticmethod def create(source: Identifier, destination: Identifier) -> MethodCall: return MethodCall(Access([source, Identifier('convert_') + destination]), MethodCall.STACK_ARGS) @staticmethod @ParserRule.mark def parse_inline_cast_op(value: ValueType, _: LexicalCast, target_type: Identifier): return MethodCall(Access([value, Identifier('convert_') + target_type]), [])
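A note on the convert_ convention shared by both methods in the thinglang record above: Identifier('convert_') + target_type builds the conversion method's name, so a cast to number resolves to a call to convert_number. A toy model of that concatenation, emphatically not thinglang's real Identifier class:

# Toy model of the naming rule: a cast to type T becomes a call to convert_T.
class Identifier(str):
    def __add__(self, other):
        return Identifier(str.__add__(self, str(other)))

target_type = Identifier("number")            # hypothetical cast target
print(Identifier("convert_") + target_type)   # prints: convert_number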
b17e6bd8d067951f28422c94aad628d031e334fd
fuzzers/011-ffconfig/generate.py
fuzzers/011-ffconfig/generate.py
#!/usr/bin/env python3

import sys, re

sys.path.append("../../../utils/")
from segmaker import segmaker

segmk = segmaker("design_%s.bits" % sys.argv[1])

print("Loading tags from design_%s.txt." % sys.argv[1])
with open("design_%s.txt" % sys.argv[1], "r") as f:
    for line in f:
        line = line.split()
        site = line[0]
        bel = line[1]
        ctype = line[2]
        init = int(line[3][3])
        cinv = int(line[4][3])

        segmk.addtag(site, "%s.ZINI" % bel, 1-init)
        # segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv)

segmk.compile()
segmk.write(sys.argv[1])

#!/usr/bin/env python3

import sys, re

sys.path.append("../../../utils/")
from segmaker import segmaker

segmk = segmaker("design_%s.bits" % sys.argv[1])

print("Loading tags from design_%s.txt." % sys.argv[1])
with open("design_%s.txt" % sys.argv[1], "r") as f:
    for line in f:
        line = line.split()
        site = line[0]
        bel = line[1]
        ctype = line[2]
        init = int(line[3][3])
        cinv = int(line[4][3])

        if False:
            segmk.addtag(site, "%s.TYPE_%s" % (bel, ctype), 1)

            for i in range(1, 15):
                types = set()
                if i & 1: types.add("FDCE")
                if i & 2: types.add("FDPE")
                if i & 4: types.add("FDRE")
                if i & 8: types.add("FDSE")
                segmk.addtag(site, "%s.TYPES_%s" % (bel, "_".join(sorted(types))), ctype in types)

        if False:
            segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv)

        segmk.addtag(site, "%s.ZINI" % bel, 1-init)

segmk.compile()
segmk.write(sys.argv[1])
Add more tags to ffconfig fuzzer (currently disabled)
Add more tags to ffconfig fuzzer (currently disabled)

Signed-off-by: Clifford Wolf <b28e7ff75903c06de987a1eb75ea100a79c89e3a@clifford.at>
Signed-off-by: Tim 'mithro' Ansell <57310ee00039176189a7bd7b876cda4d0d2a19aa@mithis.com>
Python
isc
SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray
#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) segmk.addtag(site, "%s.ZINI" % bel, 1-init) # segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.compile() segmk.write(sys.argv[1]) Add more tags to ffconfig fuzzer (currently disabled) Signed-off-by: Clifford Wolf <b28e7ff75903c06de987a1eb75ea100a79c89e3a@clifford.at> Signed-off-by: Tim 'mithro' Ansell <57310ee00039176189a7bd7b876cda4d0d2a19aa@mithis.com>
#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) if False: segmk.addtag(site, "%s.TYPE_%s" % (bel, ctype), 1) for i in range(1, 15): types = set() if i & 1: types.add("FDCE") if i & 2: types.add("FDPE") if i & 4: types.add("FDRE") if i & 8: types.add("FDSE") segmk.addtag(site, "%s.TYPES_%s" % (bel, "_".join(sorted(types))), ctype in types) if False: segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.addtag(site, "%s.ZINI" % bel, 1-init) segmk.compile() segmk.write(sys.argv[1])
<commit_before>#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) segmk.addtag(site, "%s.ZINI" % bel, 1-init) # segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.compile() segmk.write(sys.argv[1]) <commit_msg>Add more tags to ffconfig fuzzer (currently disabled) Signed-off-by: Clifford Wolf <b28e7ff75903c06de987a1eb75ea100a79c89e3a@clifford.at> Signed-off-by: Tim 'mithro' Ansell <57310ee00039176189a7bd7b876cda4d0d2a19aa@mithis.com><commit_after>
#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) if False: segmk.addtag(site, "%s.TYPE_%s" % (bel, ctype), 1) for i in range(1, 15): types = set() if i & 1: types.add("FDCE") if i & 2: types.add("FDPE") if i & 4: types.add("FDRE") if i & 8: types.add("FDSE") segmk.addtag(site, "%s.TYPES_%s" % (bel, "_".join(sorted(types))), ctype in types) if False: segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.addtag(site, "%s.ZINI" % bel, 1-init) segmk.compile() segmk.write(sys.argv[1])
#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) segmk.addtag(site, "%s.ZINI" % bel, 1-init) # segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.compile() segmk.write(sys.argv[1]) Add more tags to ffconfig fuzzer (currently disabled) Signed-off-by: Clifford Wolf <b28e7ff75903c06de987a1eb75ea100a79c89e3a@clifford.at> Signed-off-by: Tim 'mithro' Ansell <57310ee00039176189a7bd7b876cda4d0d2a19aa@mithis.com>#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) if False: segmk.addtag(site, "%s.TYPE_%s" % (bel, ctype), 1) for i in range(1, 15): types = set() if i & 1: types.add("FDCE") if i & 2: types.add("FDPE") if i & 4: types.add("FDRE") if i & 8: types.add("FDSE") segmk.addtag(site, "%s.TYPES_%s" % (bel, "_".join(sorted(types))), ctype in types) if False: segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.addtag(site, "%s.ZINI" % bel, 1-init) segmk.compile() segmk.write(sys.argv[1])
<commit_before>#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) segmk.addtag(site, "%s.ZINI" % bel, 1-init) # segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.compile() segmk.write(sys.argv[1]) <commit_msg>Add more tags to ffconfig fuzzer (currently disabled) Signed-off-by: Clifford Wolf <b28e7ff75903c06de987a1eb75ea100a79c89e3a@clifford.at> Signed-off-by: Tim 'mithro' Ansell <57310ee00039176189a7bd7b876cda4d0d2a19aa@mithis.com><commit_after>#!/usr/bin/env python3 import sys, re sys.path.append("../../../utils/") from segmaker import segmaker segmk = segmaker("design_%s.bits" % sys.argv[1]) print("Loading tags from design_%s.txt." % sys.argv[1]) with open("design_%s.txt" % sys.argv[1], "r") as f: for line in f: line = line.split() site = line[0] bel = line[1] ctype = line[2] init = int(line[3][3]) cinv = int(line[4][3]) if False: segmk.addtag(site, "%s.TYPE_%s" % (bel, ctype), 1) for i in range(1, 15): types = set() if i & 1: types.add("FDCE") if i & 2: types.add("FDPE") if i & 4: types.add("FDRE") if i & 8: types.add("FDSE") segmk.addtag(site, "%s.TYPES_%s" % (bel, "_".join(sorted(types))), ctype in types) if False: segmk.addtag(site, "%s.CLOCK_INV" % (bel.split(".")[0]), cinv) segmk.addtag(site, "%s.ZINI" % bel, 1-init) segmk.compile() segmk.write(sys.argv[1])
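The range(1, 15) loop added in the prjxray record above is a bitmask enumeration: each value of i from 1 to 14 encodes one non-empty, proper subset of the four flip-flop primitives. A standalone illustration of just that enumeration, with my own names, runnable without the fuzzer harness:

# Sketch: enumerate every proper, non-empty subset of the four FF types,
# exactly as the chain of `if i & ...` tests above does.
FF_TYPES = ("FDCE", "FDPE", "FDRE", "FDSE")

for mask in range(1, 15):  # 0b0001 .. 0b1110; skips the empty and full sets
    subset = {t for bit, t in enumerate(FF_TYPES) if mask & (1 << bit)}
    print("{:04b} -> {}".format(mask, "_".join(sorted(subset))))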
01dd4901532df4f3da51501d4f223c873dd49dd8
ideascube/tests/test_settings.py
ideascube/tests/test_settings.py
import glob
import os
import importlib

import pytest


@pytest.fixture(params=glob.glob('ideascube/conf/*.py'))
def setting_module(request):
    basename = os.path.basename(request.param)
    module, _ = os.path.splitext(basename)
    return '.conf.%s' % module


def test_setting_file(setting_module):
    from ideascube.forms import UserImportForm

    settings = importlib.import_module(setting_module, package="ideascube")
    assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str)

    for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []):
        assert hasattr(UserImportForm, '_get_{}_mapping'.format(name))
        assert hasattr(UserImportForm, '_get_{}_reader'.format(name))

import glob
import os
import importlib

import pytest


@pytest.fixture(params=sorted([
    f for f in glob.glob('ideascube/conf/*.py')
    if not f.endswith('/__init__.py')
]))
def setting_module(request):
    basename = os.path.basename(request.param)
    module, _ = os.path.splitext(basename)
    return '.conf.%s' % module


def test_setting_file(setting_module):
    from ideascube.forms import UserImportForm

    settings = importlib.import_module(setting_module, package="ideascube")
    assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str)

    for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []):
        assert hasattr(UserImportForm, '_get_{}_mapping'.format(name))
        assert hasattr(UserImportForm, '_get_{}_reader'.format(name))
Improve the settings files testing fixture
tests: Improve the settings files testing fixture Let's order these files, as it makes it nicer in the pytest output. In addition, we can filter out the __init__.py file, since it is completely empty.
Python
agpl-3.0
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
import glob import os import importlib import pytest @pytest.fixture(params=glob.glob('ideascube/conf/*.py')) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name)) tests: Improve the settings files testing fixture Let's order these files, as it makes it nicer in the pytest output. In addition, we can filter out the __init__.py file, since it is completely empty.
import glob import os import importlib import pytest @pytest.fixture(params=sorted([ f for f in glob.glob('ideascube/conf/*.py') if not f.endswith('/__init__.py') ])) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name))
<commit_before>import glob import os import importlib import pytest @pytest.fixture(params=glob.glob('ideascube/conf/*.py')) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name)) <commit_msg>tests: Improve the settings files testing fixture Let's order these files, as it makes it nicer in the pytest output. In addition, we can filter out the __init__.py file, since it is completely empty.<commit_after>
import glob import os import importlib import pytest @pytest.fixture(params=sorted([ f for f in glob.glob('ideascube/conf/*.py') if not f.endswith('/__init__.py') ])) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name))
import glob import os import importlib import pytest @pytest.fixture(params=glob.glob('ideascube/conf/*.py')) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name)) tests: Improve the settings files testing fixture Let's order these files, as it makes it nicer in the pytest output. In addition, we can filter out the __init__.py file, since it is completely empty.import glob import os import importlib import pytest @pytest.fixture(params=sorted([ f for f in glob.glob('ideascube/conf/*.py') if not f.endswith('/__init__.py') ])) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name))
<commit_before>import glob import os import importlib import pytest @pytest.fixture(params=glob.glob('ideascube/conf/*.py')) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name)) <commit_msg>tests: Improve the settings files testing fixture Let's order these files, as it makes it nicer in the pytest output. In addition, we can filter out the __init__.py file, since it is completely empty.<commit_after>import glob import os import importlib import pytest @pytest.fixture(params=sorted([ f for f in glob.glob('ideascube/conf/*.py') if not f.endswith('/__init__.py') ])) def setting_module(request): basename = os.path.basename(request.param) module, _ = os.path.splitext(basename) return '.conf.%s' % module def test_setting_file(setting_module): from ideascube.forms import UserImportForm settings = importlib.import_module(setting_module, package="ideascube") assert isinstance(getattr(settings, 'IDEASCUBE_NAME', ''), str) for name, _ in getattr(settings, 'USER_IMPORT_FORMATS', []): assert hasattr(UserImportForm, '_get_{}_mapping'.format(name)) assert hasattr(UserImportForm, '_get_{}_reader'.format(name))
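The fix sorts the glob result and drops __init__.py before parametrizing. A minimal self-contained sketch of that pattern, with a hypothetical myapp/conf/ layout standing in for the real package:

import glob
import os

import pytest


@pytest.fixture(params=sorted(
    f for f in glob.glob('myapp/conf/*.py')
    if not f.endswith('/__init__.py')
))
def conf_module(request):
    # One test is generated per remaining file, in stable sorted order.
    name, _ = os.path.splitext(os.path.basename(request.param))
    return name


def test_conf_module_name(conf_module):
    assert conf_module  # non-empty module name

Sorting matters because glob order is filesystem-dependent, so unsorted params can shuffle the generated test IDs between runs.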
25b9818da4b1922d808812bb43a9c1b35c277b7e
integration-test/1687-fewer-places-at-low-zoom.py
integration-test/1687-fewer-places-at-low-zoom.py
# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 2 (one less than the min zoom) self.assert_no_matching_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, })
# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should exist at zoom 2 (one past the min zoom) self.assert_has_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, })
Revert "update Guam to not show at zoom 2"
Revert "update Guam to not show at zoom 2" This reverts commit b12f1560f6b6284c9d26dab96a6c09eac1942424.
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 2 (one less than the min zoom) self.assert_no_matching_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, }) Revert "update Guam to not show at zoom 2" This reverts commit b12f1560f6b6284c9d26dab96a6c09eac1942424.
# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should exist at zoom 2 (one past the min zoom) self.assert_has_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, })
<commit_before># -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 2 (one less than the min zoom) self.assert_no_matching_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, }) <commit_msg>Revert "update Guam to not show at zoom 2" This reverts commit b12f1560f6b6284c9d26dab96a6c09eac1942424.<commit_after>
# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should exist at zoom 2 (one past the min zoom) self.assert_has_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, })
# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 2 (one less than the min zoom) self.assert_no_matching_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, }) Revert "update Guam to not show at zoom 2" This reverts commit b12f1560f6b6284c9d26dab96a6c09eac1942424.# -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should exist at zoom 2 (one past the min zoom) self.assert_has_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, })
<commit_before># -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 2 (one less than the min zoom) self.assert_no_matching_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, }) <commit_msg>Revert "update Guam to not show at zoom 2" This reverts commit b12f1560f6b6284c9d26dab96a6c09eac1942424.<commit_after># -*- encoding: utf-8 -*- from . import FixtureTest class LowZoomPlacesTest(FixtureTest): def test_zoom_1(self): import dsl z, x, y = (3, 7, 3) self.generate_fixtures( dsl.way(607976629, dsl.tile_centre_shape(z, x, y), { "min_zoom": 1, "__ne_max_zoom": 10, "__ne_min_zoom": 3, "area": 0, "place": "country", "name": "Guam", "population": 185427, "source": "openstreetmap.org", }), ) # should exist at zoom 3 (the min zoom from NE) self.assert_has_feature( z, x, y, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should exist at zoom 2 (one past the min zoom) self.assert_has_feature( z-1, x//2, y//2, 'places', { 'id': 607976629, 'kind': 'country', 'name': 'Guam', }) # should not exist at zoom 1 self.assert_no_matching_feature( z-2, x//4, y//4, 'places', { 'id': 607976629, })
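The restored assertion checks the same feature one zoom level up via z-1, x//2, y//2. That arithmetic generalizes to any number of levels; a hypothetical helper, not part of the test suite:

def parent_tile(z, x, y, levels=1):
    """Tile containing (z, x, y) after stepping `levels` zooms down."""
    return z - levels, x >> levels, y >> levels


assert parent_tile(3, 7, 3) == (2, 3, 1)            # the z-1, x//2, y//2 case
assert parent_tile(3, 7, 3, levels=2) == (1, 1, 0)  # the z-2, x//4, y//4 case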
9209c56661c2b14a09db339cf1551e536965ad7f
{{cookiecutter.extension_name}}/{{cookiecutter.extension_name}}/__init__.py
{{cookiecutter.extension_name}}/{{cookiecutter.extension_name}}/__init__.py
from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): @property def data(self): return self._data @data.setter def data(self, data): if isinstance(data, str): data = json.loads(data) self._data = data def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } display(bundle, raw=True)
from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): """A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel. {{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings. Scalar types (None, number, string) are not allowed, only dict containers. """ def _data_and_metadata(self): return self.data, self.metadata def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } metadata = { '{{cookiecutter.mime_type}}': self.metadata } display(bundle, metadata=metadata, raw=True)
Include display metadata in mime bundle
Include display metadata in mime bundle
Python
cc0-1.0
gnestor/mimerender-cookiecutter,gnestor/mimerender-cookiecutter,jupyterlab/mimerender-cookiecutter,jupyterlab/mimerender-cookiecutter
from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): @property def data(self): return self._data @data.setter def data(self, data): if isinstance(data, str): data = json.loads(data) self._data = data def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } display(bundle, raw=True) Include display metadata in mime bundle
from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): """A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel. {{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings. Scalar types (None, number, string) are not allowed, only dict containers. """ def _data_and_metadata(self): return self.data, self.metadata def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } metadata = { '{{cookiecutter.mime_type}}': self.metadata } display(bundle, metadata=metadata, raw=True)
<commit_before>from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): @property def data(self): return self._data @data.setter def data(self, data): if isinstance(data, str): data = json.loads(data) self._data = data def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } display(bundle, raw=True) <commit_msg>Include display metadata in mime bundle<commit_after>
from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): """A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel. {{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings. Scalar types (None, number, string) are not allowed, only dict containers. """ def _data_and_metadata(self): return self.data, self.metadata def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } metadata = { '{{cookiecutter.mime_type}}': self.metadata } display(bundle, metadata=metadata, raw=True)
from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): @property def data(self): return self._data @data.setter def data(self, data): if isinstance(data, str): data = json.loads(data) self._data = data def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } display(bundle, raw=True) Include display metadata in mime bundlefrom IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): """A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel. {{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings. Scalar types (None, number, string) are not allowed, only dict containers. """ def _data_and_metadata(self): return self.data, self.metadata def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } metadata = { '{{cookiecutter.mime_type}}': self.metadata } display(bundle, metadata=metadata, raw=True)
<commit_before>from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): @property def data(self): return self._data @data.setter def data(self, data): if isinstance(data, str): data = json.loads(data) self._data = data def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } display(bundle, raw=True) <commit_msg>Include display metadata in mime bundle<commit_after>from IPython.display import display, JSON import json # Running `npm run build` will create static resources in the static # directory of this Python package (and create that directory if necessary). def _jupyter_labextension_paths(): return [{ 'name': '{{cookiecutter.extension_name}}', 'src': 'static', }] def _jupyter_nbextension_paths(): return [{ 'section': 'notebook', 'src': 'static', 'dest': '{{cookiecutter.extension_name}}', 'require': '{{cookiecutter.extension_name}}/extension' }] # A display class that can be used within a notebook. E.g.: # from {{cookiecutter.extension_name}} import {{cookiecutter.mime_short_name}} # {{cookiecutter.mime_short_name}}(data) class {{cookiecutter.mime_short_name}}(JSON): """A display class for displaying {{cookiecutter.mime_short_name}} visualizations in the Jupyter Notebook and IPython kernel. {{cookiecutter.mime_short_name}} expects a JSON-able dict, not serialized JSON strings. Scalar types (None, number, string) are not allowed, only dict containers. """ def _data_and_metadata(self): return self.data, self.metadata def _ipython_display_(self): bundle = { '{{cookiecutter.mime_type}}': self.data, 'text/plain': '<{{cookiecutter.extension_name}}.{{cookiecutter.mime_short_name}} object>' } metadata = { '{{cookiecutter.mime_type}}': self.metadata } display(bundle, metadata=metadata, raw=True)
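Stripped to its core, the change publishes per-mimetype metadata alongside the data bundle. A sketch with a made-up mime type ('application/vnd.example+json' and the class name are illustrative only):

from IPython.display import display


class ExampleJSON:
    def __init__(self, data, metadata=None):
        self.data = data
        self.metadata = metadata or {}

    def _ipython_display_(self):
        bundle = {
            'application/vnd.example+json': self.data,
            'text/plain': '<ExampleJSON object>',
        }
        # Metadata is keyed by the same mime type as the payload it describes.
        display(bundle,
                metadata={'application/vnd.example+json': self.metadata},
                raw=True)

Frontends that understand the mime type can then read rendering hints for that payload from the matching metadata entry.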
1045c46b48f9599faf85f259c0e9d5e89f11a700
website/views.py
website/views.py
from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 4 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html')
from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 3 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html')
Change number of news on frontpage
Change number of news on frontpage
Python
mit
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 4 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html') Change number of news on frontpage
from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 3 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html')
<commit_before>from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 4 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html') <commit_msg>Change number of news on frontpage<commit_after>
from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 3 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html')
from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 4 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html') Change number of news on frontpagefrom django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 3 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html')
<commit_before>from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 4 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html') <commit_msg>Change number of news on frontpage<commit_after>from django.shortcuts import render from news.models import Article, Event from door.models import DoorStatus from datetime import datetime from itertools import chain def index(request): number_of_news = 3 # Sorts the news to show the events nearest in future and then fill in with the newest articles event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1] article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)] news_list = list(chain(event_list, article_list)) try: door_status = DoorStatus.objects.get(name='hackerspace').status except DoorStatus.DoesNotExist: door_status = True context = { 'news_list': news_list, 'door_status': door_status, } return render(request, 'index.html', context) def test404(request): return render(request, '404.html')
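The view fills a fixed-size news list with upcoming events first and newest articles second. The same fill-up logic on plain lists, with invented data, makes the length bookkeeping explicit:

from itertools import chain

number_of_news = 3
events = ['workshop', 'open-day']          # soonest first
articles = ['post-5', 'post-4', 'post-3']  # newest first

upcoming = events[:number_of_news]
newest = articles[:number_of_news - len(upcoming)]
news_list = list(chain(upcoming, newest))

assert news_list == ['workshop', 'open-day', 'post-5']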
666410191bffc7477188fcbcb2b5973b15580b0f
ydf/templating.py
ydf/templating.py
""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **(yaml_vars or {})) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
Handle case when YAML is empty.
Handle case when YAML is empty.
Python
apache-2.0
ahawker/ydf
""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env Handle case when YAML is empty.
""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **(yaml_vars or {})) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
<commit_before>""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env <commit_msg>Handle case when YAML is empty.<commit_after>
""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **(yaml_vars or {})) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env Handle case when YAML is empty.""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **(yaml_vars or {})) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
<commit_before>""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **yaml_vars) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env <commit_msg>Handle case when YAML is empty.<commit_after>""" ydf/templating ~~~~~~~~~~~~~~ Contains functions to be exported into the Jinja2 environment and accessible from templates. """ import jinja2 import os from ydf import instructions, __version__ DEFAULT_TEMPLATE_NAME = 'default.tpl' DEFAULT_TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates') def render_vars(yaml_vars): """ Build a dict containing all variables accessible to a template during the rendering process. This is a merge of the YAML variables parsed from the file + build variables defined by :mod:`~ydf` itself. :param yaml_vars: Parsed from the parsed YAML file. :return: Dict of all variables available to template. """ return dict(ydf=dict(version=__version__), **(yaml_vars or {})) def environ(path=DEFAULT_TEMPLATE_PATH, **kwargs): """ Build a Jinja2 environment for the given template directory path and options. :param path: Path to search for Jinja2 template files :param kwargs: Options to configure the environment :return: :class:`~jinja2.Environment` instance """ kwargs.setdefault('trim_blocks', True) kwargs.setdefault('lstrip_blocks', True) env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), **kwargs) env.globals[instructions.convert_instruction.__name__] = instructions.convert_instruction return env
54fbec96c1cd8f77731dbe4a42c2dc28c1953ffc
rouver/util.py
rouver/util.py
import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%"))
import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%~"))
Add tilde to urljoin safe characters
Add tilde to urljoin safe characters This is not necessary for Python 3.7, but still affects older Python versions.
Python
mit
srittau/rouver
import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%")) Add tilde to urljoin safe characters This is not necessary for Python 3.7, but still affects older Python versions.
import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%~"))
<commit_before>import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%")) <commit_msg>Add tilde to urljoin safe characters This is not necessary for Python 3.7, but still affects older Python versions.<commit_after>
import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%~"))
import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%")) Add tilde to urljoin safe characters This is not necessary for Python 3.7, but still affects older Python versions.import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%~"))
<commit_before>import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%")) <commit_msg>Add tilde to urljoin safe characters This is not necessary for Python 3.7, but still affects older Python versions.<commit_after>import re from urllib.parse import urljoin, quote from werkzeug import Request _url_scheme_re = re.compile(r"^[a-zA-Z][a-zA-Z0-9.+-]*:") def absolute_url(request: Request, path: str) -> str: """ Construct an absolute URL, using the request URL as base. Non-printable and non-ASCII characters in the path are encoded, but other characters, most notably slashes and percent signs are not encoded. Make sure to call urllib.parse.quote() on paths that can potentially contain such characters before passing them to absolute_url(). """ return urljoin(request.base_url, quote(path, safe="/:?&$@,;+=%~"))
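To see what the extra safe character buys on older interpreters (the message above already notes that Python 3.7 is unaffected), a quick check with an invented path:

from urllib.parse import quote

path = '/~user/reports?page=1'

# Python <= 3.6: '~' is escaped unless listed in safe.
#   quote(path, safe='/:?&$@,;+=%')   -> '/%7Euser/reports?page=1'
#   quote(path, safe='/:?&$@,;+=%~')  -> '/~user/reports?page=1'
# Python >= 3.7: '~' is never quoted, so both calls return the second form.
print(quote(path, safe='/:?&$@,;+=%~'))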
f43519e2fc6faf9956febcf61185c789454a4f0f
personal_website/models.py
personal_website/models.py
from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
Fix for tests failing because of persisted database file for the next build
Fix for tests failing because of persisted database file for the next build
Python
mit
tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website
from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all() Fix for tests failing because of persisted database file for the next build
import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
<commit_before>from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all() <commit_msg>Fix for tests failing because of persisted database file for the next build<commit_after>
import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all() Fix for tests failing because of persisted database file for the next buildimport os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
<commit_before>from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all() <commit_msg>Fix for tests failing because of persisted database file for the next build<commit_after>import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
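os.system('rm -f ...') shells out and only works where a Unix rm exists; the same pre-run reset can be done portably from Python. A minimal sketch, assuming DATABASE_NAME holds the plain file name used above:

import os
from contextlib import suppress

DATABASE_NAME = 'intermediate_data.db'  # assumed value of constants.DATABASE_NAME

# Same effect as `rm -f`: remove the file if it exists, ignore it if it
# does not, and avoid spawning a shell (also works on Windows).
with suppress(FileNotFoundError):
    os.remove(DATABASE_NAME)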
468feeb2fd4be6e8a2429518c46083fcad6fcf2b
falmer/studentgroups/queries.py
falmer/studentgroups/queries.py
import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-70).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id)
import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-90).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id)
Increase SG last sync for more overlap
Increase SG last sync for more overlap
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-70).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id) Increase SG last sync for more overlap
import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-90).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id)
<commit_before>import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-70).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id) <commit_msg>Increase SG last sync for more overlap<commit_after>
import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-90).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id)
import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-70).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id) Increase SG last sync for more overlapimport graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-90).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id)
<commit_before>import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-70).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id) <commit_msg>Increase SG last sync for more overlap<commit_after>import graphene import arrow from django.db.models import Q from falmer.schema.schema import DjangoConnectionField from falmer.studentgroups.types import StudentGroup from . import types from . import models class Query(graphene.ObjectType): all_groups = DjangoConnectionField(StudentGroup) group = graphene.Field(types.StudentGroup, group_id=graphene.Int()) def resolve_all_groups(self, info): qs = models.StudentGroup.objects \ .order_by('name') \ .select_related('msl_group', 'logo') \ .filter(Q(msl_group__last_sync__gte=arrow.now().shift(minutes=-90).datetime) | Q(msl_group__isnull=True)) return qs def resolve_group(self, info, **kwargs): group_id = kwargs.get('group_id') return models.StudentGroup.objects \ .select_related('logo').get(pk=group_id)
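The filter keeps groups whose MSL record was synced within the last 90 minutes (widened from 70 so consecutive sync runs overlap), plus groups with no MSL record at all. The cutoff computation on its own:

import arrow

# Anything synced at or after this instant passes the freshness check.
cutoff = arrow.now().shift(minutes=-90).datetime
print(cutoff)

# The ORM filter above is then, roughly:
#   Q(msl_group__last_sync__gte=cutoff) | Q(msl_group__isnull=True)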
e8db81b563688d977a3090f6f2d4fc7efaa42323
tests/document_download/test_document_download.py
tests/document_download/test_document_download.py
import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( "{}/services/{}/documents".format(config['document_download']['api_host'], service_id), headers={ 'Authorization': "Bearer {}".format(config['document_download']['api_key']), }, files={ 'document': file_contents } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], '%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file'
import base64 import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( f"{config['document_download']['api_host']}/services/{service_id}/documents", headers={ 'Authorization': f"Bearer {config['document_download']['api_key']}", }, json={ 'document': base64.b64encode(file_contents).decode('ascii') } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], b'%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file'
Update how the document download test calls document-download-api
Update how the document download test calls document-download-api document-download-api now accepts json content - it was still possible to send files while we switched over, but we now want to only support json. This is the only place where we weren't already sending json to document-download-api.
Python
mit
alphagov/notifications-functional-tests,alphagov/notifications-functional-tests
import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( "{}/services/{}/documents".format(config['document_download']['api_host'], service_id), headers={ 'Authorization': "Bearer {}".format(config['document_download']['api_key']), }, files={ 'document': file_contents } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], '%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file' Update how the document download test calls document-download-api document-download-api now accepts json content - it was still possible to send files while we switched over, but we now want to only support json. This is the only place where we weren't already sending json to document-download-api.
import base64 import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( f"{config['document_download']['api_host']}/services/{service_id}/documents", headers={ 'Authorization': f"Bearer {config['document_download']['api_key']}", }, json={ 'document': base64.b64encode(file_contents).decode('ascii') } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], b'%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file'
<commit_before>import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( "{}/services/{}/documents".format(config['document_download']['api_host'], service_id), headers={ 'Authorization': "Bearer {}".format(config['document_download']['api_key']), }, files={ 'document': file_contents } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], '%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file' <commit_msg>Update how the document download test calls document-download-api document-download-api now accepts json content - it was still possible to send files while we switched over, but we now want to only support json. This is the only place where we weren't already sending json to document-download-api.<commit_after>
import base64 import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( f"{config['document_download']['api_host']}/services/{service_id}/documents", headers={ 'Authorization': f"Bearer {config['document_download']['api_key']}", }, json={ 'document': base64.b64encode(file_contents).decode('ascii') } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], b'%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file'
import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( "{}/services/{}/documents".format(config['document_download']['api_host'], service_id), headers={ 'Authorization': "Bearer {}".format(config['document_download']['api_key']), }, files={ 'document': file_contents } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], '%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file' Update how the document download test calls document-download-api document-download-api now accepts json content - it was still possible to send files while we switched over, but we now want to only support json. This is the only place where we weren't already sending json to document-download-api.import base64 import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( f"{config['document_download']['api_host']}/services/{service_id}/documents", headers={ 'Authorization': f"Bearer {config['document_download']['api_key']}", }, json={ 'document': base64.b64encode(file_contents).decode('ascii') } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], b'%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file'
<commit_before>import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( "{}/services/{}/documents".format(config['document_download']['api_host'], service_id), headers={ 'Authorization': "Bearer {}".format(config['document_download']['api_key']), }, files={ 'document': file_contents } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], '%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file' <commit_msg>Update how the document download test calls document-download-api document-download-api now accepts json content - it was still possible to send files while we switched over, but we now want to only support json. This is the only place where we weren't already sending json to document-download-api.<commit_after>import base64 import pytest import requests from retry.api import retry_call from config import config from tests.pages import DocumentDownloadLandingPage, DocumentDownloadPage def upload_document(service_id, file_contents): response = requests.post( f"{config['document_download']['api_host']}/services/{service_id}/documents", headers={ 'Authorization': f"Bearer {config['document_download']['api_key']}", }, json={ 'document': base64.b64encode(file_contents).decode('ascii') } ) json = response.json() assert 'error' not in json, 'Status code {}'.format(response.status_code) return json['document'] @pytest.mark.antivirus def test_document_upload_and_download(driver): document = retry_call( upload_document, # add PDF header to trick doc download into thinking its a real pdf fargs=[config['service']['id'], b'%PDF-1.4 functional tests file'], tries=3, delay=10 ) driver.get(document['url']) landing_page = DocumentDownloadLandingPage(driver) assert 'Functional Tests' in landing_page.get_service_name() landing_page.go_to_download_page() download_page = DocumentDownloadPage(driver) document_url = download_page.get_download_link() downloaded_document = requests.get(document_url) assert downloaded_document.text == '%PDF-1.4 functional tests file'
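JSON has no raw-bytes type, so the updated test base64-encodes the file contents into an ASCII string before placing them in the request body; the server decodes the string back to bytes. A self-contained round trip:

import base64
import json

file_contents = b'%PDF-1.4 functional tests file'

# Encode bytes to an ASCII string so they can travel inside a JSON body.
payload = {'document': base64.b64encode(file_contents).decode('ascii')}
print(json.dumps(payload))

# The receiving side reverses the transformation.
assert base64.b64decode(payload['document']) == file_contents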
9d6c9a27829cad739a82eed7ce7fcc7740f9121b
fil_finder/tests/test_widths.py
fil_finder/tests/test_widths.py
from fil_finder.width import nonparam_width import numpy as np import numpy.testing as npt def test_nonparam(): pts = np.linspace(0, 10, 100) profile = 2.0*np.exp(-pts**2 / (2*3.0**2)) + 0.5 params, errors, fail = \ nonparam_width(pts, profile, pts, profile, 1.0, 5, 99) # This shouldn't be failing assert fail is False # Check the amplitude npt.assert_allclose(params[0], 2.5, atol=0.01) # Width npt.assert_allclose(params[1], 3.0, atol=0.01) # Background npt.assert_allclose(params[2], 0.5, atol=0.02)
Add test for non-parametric width
Add test for non-parametric width
Python
mit
e-koch/FilFinder
Add test for non-parametric width
from fil_finder.width import nonparam_width import numpy as np import numpy.testing as npt def test_nonparam(): pts = np.linspace(0, 10, 100) profile = 2.0*np.exp(-pts**2 / (2*3.0**2)) + 0.5 params, errors, fail = \ nonparam_width(pts, profile, pts, profile, 1.0, 5, 99) # This shouldn't be failing assert fail is False # Check the amplitude npt.assert_allclose(params[0], 2.5, atol=0.01) # Width npt.assert_allclose(params[1], 3.0, atol=0.01) # Background npt.assert_allclose(params[2], 0.5, atol=0.02)
<commit_before><commit_msg>Add test for non-parametric width<commit_after>
from fil_finder.width import nonparam_width import numpy as np import numpy.testing as npt def test_nonparam(): pts = np.linspace(0, 10, 100) profile = 2.0*np.exp(-pts**2 / (2*3.0**2)) + 0.5 params, errors, fail = \ nonparam_width(pts, profile, pts, profile, 1.0, 5, 99) # This shouldn't be failing assert fail is False # Check the amplitude npt.assert_allclose(params[0], 2.5, atol=0.01) # Width npt.assert_allclose(params[1], 3.0, atol=0.01) # Background npt.assert_allclose(params[2], 0.5, atol=0.02)
Add test for non-parametric width from fil_finder.width import nonparam_width import numpy as np import numpy.testing as npt def test_nonparam(): pts = np.linspace(0, 10, 100) profile = 2.0*np.exp(-pts**2 / (2*3.0**2)) + 0.5 params, errors, fail = \ nonparam_width(pts, profile, pts, profile, 1.0, 5, 99) # This shouldn't be failing assert fail is False # Check the amplitude npt.assert_allclose(params[0], 2.5, atol=0.01) # Width npt.assert_allclose(params[1], 3.0, atol=0.01) # Background npt.assert_allclose(params[2], 0.5, atol=0.02)
<commit_before><commit_msg>Add test for non-parametric width<commit_after> from fil_finder.width import nonparam_width import numpy as np import numpy.testing as npt def test_nonparam(): pts = np.linspace(0, 10, 100) profile = 2.0*np.exp(-pts**2 / (2*3.0**2)) + 0.5 params, errors, fail = \ nonparam_width(pts, profile, pts, profile, 1.0, 5, 99) # This shouldn't be failing assert fail is False # Check the amplitude npt.assert_allclose(params[0], 2.5, atol=0.01) # Width npt.assert_allclose(params[1], 3.0, atol=0.01) # Background npt.assert_allclose(params[2], 0.5, atol=0.02)
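The fixture is a Gaussian of amplitude 2.0 and width 3.0 on a 0.5 background, so the peak the test expects is 2.0 + 0.5 = 2.5. Reconstructing just the synthetic profile:

import numpy as np

pts = np.linspace(0, 10, 100)
amp, width, bg = 2.0, 3.0, 0.5
profile = amp * np.exp(-pts**2 / (2 * width**2)) + bg

# Peak sits at pts = 0 and equals amplitude plus background.
print(profile[0])  # 2.5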
edc8248e6122dcfc1c4e6972ae0a4866de5c0d42
modules/urbandictionary.py
modules/urbandictionary.py
"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
Fix new lines in definition of UD module
Fix new lines in definition of UD module
Python
mit
billyvg/piebot
"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>') Fix new lines in definition of UD module
"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
<commit_before>"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>') <commit_msg>Fix new lines in definition of UD module<commit_after>
"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>') Fix new lines in definition of UD module"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
<commit_before>"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>') <commit_msg>Fix new lines in definition of UD module<commit_after>"""Looks up a term from urban dictionary @package ppbot @syntax ud <word> """ import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
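The fix matters because replacing line breaks with the empty string glues adjacent words together, while substituting a space keeps them readable (at the cost of an occasional doubled space where a \r\n pair occurred):

raw = "An example\r\ndefinition that\nwraps lines"

glued = raw.replace("\r", "").replace("\n", "")
spaced = raw.replace("\r", " ").replace("\n", " ")

print(glued)   # An exampledefinition thatwraps lines
print(spaced)  # An example  definition that wraps lines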
5d6049be925330803f8c782d884cd318ad23ba28
repo-log.py
repo-log.py
#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter='\t', quotechar='|') repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit) else: iter_ = repo.iter_commits(args.from_) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, stats['files'], stats['lines'], stats['insertions'], stats['deletions']))
#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"', doublequote=True) repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit, no_merges=True) else: iter_ = repo.iter_commits(args.from_, no_merges=True) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, commit.hexsha, stats['files'], stats['lines'], stats['insertions'], stats['deletions']))
Remove merges from the log.
Remove merges from the log.
Python
mit
aplanas/hackweek11,aplanas/hackweek11
#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter='\t', quotechar='|') repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit) else: iter_ = repo.iter_commits(args.from_) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, stats['files'], stats['lines'], stats['insertions'], stats['deletions'])) Remove merges from the log.
#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"', doublequote=True) repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit, no_merges=True) else: iter_ = repo.iter_commits(args.from_, no_merges=True) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, commit.hexsha, stats['files'], stats['lines'], stats['insertions'], stats['deletions']))
<commit_before>#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter='\t', quotechar='|') repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit) else: iter_ = repo.iter_commits(args.from_) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, stats['files'], stats['lines'], stats['insertions'], stats['deletions'])) <commit_msg>Remove merges from the log.<commit_after>
#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"', doublequote=True) repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit, no_merges=True) else: iter_ = repo.iter_commits(args.from_, no_merges=True) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, commit.hexsha, stats['files'], stats['lines'], stats['insertions'], stats['deletions']))
#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter='\t', quotechar='|') repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit) else: iter_ = repo.iter_commits(args.from_) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, stats['files'], stats['lines'], stats['insertions'], stats['deletions'])) Remove merges from the log.#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"', doublequote=True) repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit, no_merges=True) else: iter_ = repo.iter_commits(args.from_, no_merges=True) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, commit.hexsha, stats['files'], stats['lines'], stats['insertions'], stats['deletions']))
<commit_before>#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter='\t', quotechar='|') repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit) else: iter_ = repo.iter_commits(args.from_) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, stats['files'], stats['lines'], stats['insertions'], stats['deletions'])) <commit_msg>Remove merges from the log.<commit_after>#! /usr/bin/env python import argparse import csv import git if __name__ == '__main__': parser = argparse.ArgumentParser(description='Extract git history information.') parser.add_argument('-f', '--from', dest='from_', help='from revno') parser.add_argument('-t', '--to', help='to revno') parser.add_argument('-l', '--limit', help='max number of commits') parser.add_argument('-p', '--project', help='project directory') parser.add_argument('-c', '--csv', help='csv file name') args = parser.parse_args() if not args.csv or not args.project: parser.print_help() exit(1) with open(args.csv, 'w') as csvfile: csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"', doublequote=True) repo = git.Repo(args.project) if args.limit: iter_ = repo.iter_commits(args.from_, max_count=args.limit, no_merges=True) else: iter_ = repo.iter_commits(args.from_, no_merges=True) for commit in iter_: if commit.hexsha == args.to: break summary = commit.summary.encode('utf-8') message = commit.message.encode('utf-8') stats = commit.stats.total csvwriter.writerow((summary, message, commit.hexsha, stats['files'], stats['lines'], stats['insertions'], stats['deletions']))
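The switch to delimiter=',' with quotechar='"' and doublequote=True lets multi-line commit messages and embedded quote characters round-trip cleanly through the CSV layer. A small demonstration:

import csv
import io

buf = io.StringIO()
writer = csv.writer(buf, delimiter=',', quotechar='"', doublequote=True)

# doublequote=True escapes an embedded quote character by doubling it, and
# quoting lets a newline survive inside a single field.
writer.writerow(('Fix "edge case"', 'multi\nline message', 42))
print(buf.getvalue())

buf.seek(0)
print(next(csv.reader(buf)))  # ['Fix "edge case"', 'multi\nline message', '42']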
94be56593a43101abc21b30b187d340e7ef8c3f0
runtests.py
runtests.py
#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests()
#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): if hasattr(django, 'setup'): django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests()
Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
Python
bsd-3-clause
vstoykov/django-sticky-uploads,caktus/django-sticky-uploads,caktus/django-sticky-uploads,vstoykov/django-sticky-uploads,caktus/django-sticky-uploads,vstoykov/django-sticky-uploads
#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests() Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.
#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): if hasattr(django, 'setup'): django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests()
<commit_before>#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests() <commit_msg>Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.<commit_after>
#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): if hasattr(django, 'setup'): django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests()
#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests() Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): if hasattr(django, 'setup'): django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests()
<commit_before>#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests() <commit_msg>Fix tests for Django 1.7 by calling the new setup and explicitly including MIDDLEWARE_CLASSES.<commit_after>#!/usr/bin/env python import sys import django from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'stickyuploads', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ROOT_URLCONF='stickyuploads.tests.urls', ) if hasattr(django, 'setup'): django.setup() from django.test.utils import get_runner def runtests(): if hasattr(django, 'setup'): django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['stickyuploads', ]) sys.exit(failures) if __name__ == '__main__': runtests()
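The compatibility trick in this record is the `hasattr` guard around `django.setup()`. A condensed sketch of the pattern on its own (the settings here are a minimal stand-in):

```python
import django
from django.conf import settings

# Minimal stand-in settings so the snippet runs outside a project.
if not settings.configured:
    settings.configure(DEBUG=True)

# Django 1.7 added django.setup() to populate the app registry;
# older releases lack the attribute entirely, so the hasattr guard
# lets one test runner span both generations.
if hasattr(django, "setup"):
    django.setup()
```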
e005a5a1d10df7a5a2f6a599c3419c89bd61eeb2
mrbelvedereci/cumulusci/admin.py
mrbelvedereci/cumulusci/admin.py
from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('repo','scratch') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin)
from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('name', 'scratch', 'repo') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin)
Add filters to Org model
Add filters to Org model
Python
bsd-3-clause
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('repo','scratch') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin) Add filters to Org model
from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('name', 'scratch', 'repo') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin)
<commit_before>from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('repo','scratch') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin) <commit_msg>Add filters to Org model<commit_after>
from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('name', 'scratch', 'repo') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin)
from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('repo','scratch') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin) Add filters to Org modelfrom django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('name', 'scratch', 'repo') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin)
<commit_before>from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('repo','scratch') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin) <commit_msg>Add filters to Org model<commit_after>from django.contrib import admin from mrbelvedereci.cumulusci.models import Org from mrbelvedereci.cumulusci.models import Service class OrgAdmin(admin.ModelAdmin): list_display = ('name','repo','scratch') list_filter = ('name', 'scratch', 'repo') admin.site.register(Org, OrgAdmin) class ServiceAdmin(admin.ModelAdmin): list_display = ('name',) admin.site.register(Service, ServiceAdmin)
d80b1465e6ea6019531a2bd1df4599e28afdebf4
openstackclient/tests/functional/network/v2/test_network_service_provider.py
openstackclient/tests/functional/network/v2/test_network_service_provider.py
# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = ['L3_ROUTER_NAT'] @testtools.skip('broken SDK testing') def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output)
# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = 'L3_ROUTER_NAT' def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output)
Fix network service provider functional test
Fix network service provider functional test The SDK refactor broke the network service provider functional test; the command itself works when tested, but there is an error in the functional test, so fix it. Change-Id: I783c58cedd39a05b665e47709b2b5321871e558b Closes-Bug: 1653138
Python
apache-2.0
dtroyer/python-openstackclient,openstack/python-openstackclient,dtroyer/python-openstackclient,openstack/python-openstackclient
# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = ['L3_ROUTER_NAT'] @testtools.skip('broken SDK testing') def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output) Fix network service provider functional test The SDK refactor broke the network service provider functional test; the command itself works when tested, but there is an error in the functional test, so fix it. Change-Id: I783c58cedd39a05b665e47709b2b5321871e558b Closes-Bug: 1653138
# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = 'L3_ROUTER_NAT' def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output)
<commit_before># Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = ['L3_ROUTER_NAT'] @testtools.skip('broken SDK testing') def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output) <commit_msg>Fix network service provider functional test The SDK refactor broke the network service provider functional test; the command itself works when tested, but there is an error in the functional test, so fix it. Change-Id: I783c58cedd39a05b665e47709b2b5321871e558b Closes-Bug: 1653138<commit_after>
# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = 'L3_ROUTER_NAT' def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output)
# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = ['L3_ROUTER_NAT'] @testtools.skip('broken SDK testing') def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output) Fix network service provider functional test The SDK refactor broke the network service provider functional test; the command itself works when tested, but there is an error in the functional test, so fix it. Change-Id: I783c58cedd39a05b665e47709b2b5321871e558b Closes-Bug: 1653138# Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = 'L3_ROUTER_NAT' def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output)
<commit_before># Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = ['L3_ROUTER_NAT'] @testtools.skip('broken SDK testing') def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output) <commit_msg>Fix network service provider functional test The SDK refactor broke the network service provider functional test; the command itself works when tested, but there is an error in the functional test, so fix it. Change-Id: I783c58cedd39a05b665e47709b2b5321871e558b Closes-Bug: 1653138<commit_after># Copyright (c) 2016, Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstackclient.tests.functional import base class TestNetworkServiceProvider(base.TestCase): """Functional tests for network service provider""" SERVICE_TYPE = 'L3_ROUTER_NAT' def test_network_service_provider_list(self): raw_output = self.openstack('network service provider list') self.assertIn(self.SERVICE_TYPE, raw_output)
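The failure fixed above is easy to reproduce outside the test suite: `assertIn` performs a plain `in` membership check, and a membership test against a string requires a string on the left-hand side, so the old list-valued `SERVICE_TYPE` raised instead of asserting. A standalone illustration (the sample output line is invented):

```python
raw_output = "| router | L3_ROUTER_NAT | ... |"  # invented sample row

print("L3_ROUTER_NAT" in raw_output)  # True: the fixed, string form

try:
    ["L3_ROUTER_NAT"] in raw_output  # what the old assertion amounted to
except TypeError as exc:
    # 'in <string>' requires string as left operand, not list
    print(exc)
```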
35c76035be66516de744cd4266cf705991023cf2
logicaldelete/managers.py
logicaldelete/managers.py
from django.db import models class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs)
from django.db import models from logicaldelete.query import LogicalDeleteQuerySet class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return LogicalDeleteQuerySet(self.model, using=self._db).filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs)
Make sure QuerySet.delete() operation does not bypass protection
Make sure QuerySet.delete() operation does not bypass protection This fixes #1
Python
bsd-3-clause
angvp/django-logicaldelete,angvp/django-logical-delete,angvp/django-logicaldelete,Ubiwhere/pinax-models,angvp/django-logical-delete,naringas/pinax-models,pombredanne/django-logicaldelete,pinax/pinax-models
from django.db import models class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs) Make sure QuerySet.delete() operation does not bypass protection This fixes #1
from django.db import models from logicaldelete.query import LogicalDeleteQuerySet class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return LogicalDeleteQuerySet(self.model, using=self._db).filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs)
<commit_before>from django.db import models class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs) <commit_msg>Make sure QuerySet.delete() operation does not bypass protection This fixes #1<commit_after>
from django.db import models from logicaldelete.query import LogicalDeleteQuerySet class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return LogicalDeleteQuerySet(self.model, using=self._db).filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs)
from django.db import models class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs) Make sure QuerySet.delete() operation does not bypass protection This fixes #1from django.db import models from logicaldelete.query import LogicalDeleteQuerySet class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return LogicalDeleteQuerySet(self.model, using=self._db).filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs)
<commit_before>from django.db import models class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs) <commit_msg>Make sure QuerySet.delete() operation does not bypass protection This fixes #1<commit_after>from django.db import models from logicaldelete.query import LogicalDeleteQuerySet class LogicalDeletedManager(models.Manager): """ A manager that serves as the default manager for `logicaldelete.models.Model` providing the filtering out of logically deleted objects. In addition, it provides named querysets for getting the deleted objects. """ def get_query_set(self): if self.model: return LogicalDeleteQuerySet(self.model, using=self._db).filter( date_removed__isnull=True ) def all_with_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set() def only_deleted(self): if self.model: return super(LogicalDeletedManager, self).get_query_set().filter( date_removed__isnull=False ) def get(self, *args, **kwargs): return self.all_with_deleted().get(*args, **kwargs) def filter(self, *args, **kwargs): if "pk" in kwargs: return self.all_with_deleted().filter(*args, **kwargs) return self.get_query_set().filter(*args, **kwargs)
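The fix above swaps the manager's base queryset for a `LogicalDeleteQuerySet` imported from `logicaldelete.query`, whose body is not shown in this record; the point is that a queryset-level `delete()` must be intercepted too, since Django's bulk delete never calls `Model.delete()`. A hypothetical sketch of what that class could contain — only the class name comes from the import, the body is an assumption:

```python
import datetime

from django.db import models


class LogicalDeleteQuerySet(models.query.QuerySet):
    """Hypothetical sketch: give bulk deletes logical-delete semantics."""

    def delete(self):
        # Stamp date_removed instead of emitting SQL DELETEs, so a
        # queryset-wide delete() gets the same protection the model
        # already provides for single-object deletes.
        return self.update(date_removed=datetime.datetime.now())
```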
be2e68d077e90f1915274ac9b0e110cc82a3b126
zou/app/mixin.py
zou/app/mixin.py
from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None)
from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def clear_empty_fields(self, data): """ Remove fields set to None from data dict. """ for key in list(data.keys()): if data[key] is None: del data[key] return data def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None)
Add a function to clean dict keys
[utils] Add a function to clean dict keys Remove None values.
Python
agpl-3.0
cgwire/zou
from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None) [utils] Add a function to clean dict keys Remove None values.
from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def clear_empty_fields(self, data): """ Remove fields set to None from data dict. """ for key in list(data.keys()): if data[key] is None: del data[key] return data def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None)
<commit_before>from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None) <commit_msg>[utils] Add a function to clean dict keys Remove None values.<commit_after>
from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def clear_empty_fields(self, data): """ Remove fields set to None from data dict. """ for key in list(data.keys()): if data[key] is None: del data[key] return data def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None)
from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None) [utils] Add a function to clean dict keys Remove None values.from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def clear_empty_fields(self, data): """ Remove fields set to None from data dict. """ for key in list(data.keys()): if data[key] is None: del data[key] return data def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None)
<commit_before>from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None) <commit_msg>[utils] Add a function to clean dict keys Remove None values.<commit_after>from flask_restful import reqparse from flask import request class ArgsMixin(object): """ Helpers to retrieve parameters from GET or POST queries. """ def get_args(self, descriptors): parser = reqparse.RequestParser() for descriptor in descriptors: action = None if len(descriptor) == 4: (name, default, required, action) = descriptor else: (name, default, required) = descriptor parser.add_argument( name, required=required, default=default, action=action ) return parser.parse_args() def clear_empty_fields(self, data): """ Remove fields set to None from data dict. """ for key in list(data.keys()): if data[key] is None: del data[key] return data def get_page(self): """ Returns page requested by the user. """ options = request.args return int(options.get("page", "-1")) def get_force(self): """ Returns force parameter. """ options = request.args return options.get("force", "false") == "true" def get_relations(self): """ Returns force parameter. """ options = request.args return options.get("relations", "false") == "true" def get_episode_id(self): """ Returns episode ID parameter. """ options = request.args return options.get("episode_id", None)
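A quick usage sketch of the helper added above, reusing the `ArgsMixin` class from this record (the payload values are invented):

```python
mixin = ArgsMixin()

payload = {"name": "Shot 01", "description": None, "nb_frames": 24}

# None-valued keys are deleted in place and the same dict is
# returned, so partial-update payloads never overwrite existing
# fields with NULLs downstream.
print(mixin.clear_empty_fields(payload))
# -> {'name': 'Shot 01', 'nb_frames': 24}
```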
5b73fa155414f934912220ca4c448913a6a85a20
pandas/__init__.py
pandas/__init__.py
# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range
# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile, ExcelWriter from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range
Put ExcelWriter in pandas namespace
Put ExcelWriter in pandas namespace
Python
bsd-3-clause
amolkahat/pandas,nmartensen/pandas,harisbal/pandas,zfrenchee/pandas,DGrady/pandas,jmmease/pandas,datapythonista/pandas,gfyoung/pandas,jmmease/pandas,Winand/pandas,DGrady/pandas,gfyoung/pandas,pandas-dev/pandas,cython-testbed/pandas,TomAugspurger/pandas,TomAugspurger/pandas,louispotok/pandas,kdebrab/pandas,gfyoung/pandas,MJuddBooth/pandas,TomAugspurger/pandas,gfyoung/pandas,rs2/pandas,cbertinato/pandas,MJuddBooth/pandas,toobaz/pandas,dsm054/pandas,pandas-dev/pandas,linebp/pandas,pratapvardhan/pandas,linebp/pandas,cbertinato/pandas,cython-testbed/pandas,winklerand/pandas,gfyoung/pandas,Winand/pandas,jreback/pandas,DGrady/pandas,toobaz/pandas,cbertinato/pandas,dsm054/pandas,harisbal/pandas,MJuddBooth/pandas,winklerand/pandas,MJuddBooth/pandas,dsm054/pandas,harisbal/pandas,cython-testbed/pandas,jorisvandenbossche/pandas,zfrenchee/pandas,toobaz/pandas,kdebrab/pandas,louispotok/pandas,nmartensen/pandas,nmartensen/pandas,winklerand/pandas,amolkahat/pandas,GuessWhoSamFoo/pandas,louispotok/pandas,cython-testbed/pandas,zfrenchee/pandas,nmartensen/pandas,linebp/pandas,linebp/pandas,pratapvardhan/pandas,louispotok/pandas,datapythonista/pandas,dsm054/pandas,amolkahat/pandas,jmmease/pandas,pratapvardhan/pandas,zfrenchee/pandas,winklerand/pandas,rs2/pandas,cbertinato/pandas,pandas-dev/pandas,jorisvandenbossche/pandas,jmmease/pandas,kdebrab/pandas,jmmease/pandas,dsm054/pandas,amolkahat/pandas,Winand/pandas,kdebrab/pandas,toobaz/pandas,cbertinato/pandas,jreback/pandas,jreback/pandas,pratapvardhan/pandas,MJuddBooth/pandas,Winand/pandas,rs2/pandas,jorisvandenbossche/pandas,GuessWhoSamFoo/pandas,DGrady/pandas,winklerand/pandas,kdebrab/pandas,jorisvandenbossche/pandas,GuessWhoSamFoo/pandas,cython-testbed/pandas,GuessWhoSamFoo/pandas,louispotok/pandas,jreback/pandas,nmartensen/pandas,winklerand/pandas,jreback/pandas,nmartensen/pandas,linebp/pandas,DGrady/pandas,datapythonista/pandas,zfrenchee/pandas,toobaz/pandas,pandas-dev/pandas,harisbal/pandas,jmmease/pandas,harisbal/pandas,Winand/pandas,pratapvardhan/pandas,Winand/pandas,datapythonista/pandas,DGrady/pandas,amolkahat/pandas,rs2/pandas,GuessWhoSamFoo/pandas,TomAugspurger/pandas,linebp/pandas
# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range Put ExcelWriter in pandas namespace
# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile, ExcelWriter from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range
<commit_before># pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range <commit_msg>Put ExcelWriter in pandas namespace<commit_after>
# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile, ExcelWriter from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range
# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range Put ExcelWriter in pandas namespace# pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile, ExcelWriter from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range
<commit_before># pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range <commit_msg>Put ExcelWriter in pandas namespace<commit_after># pylint: disable-msg=W0614,W0401,W0611,W0622 __docformat__ = 'restructuredtext' from datetime import datetime import numpy as np try: import pandas._tseries as lib except Exception, e: # pragma: no cover if 'No module named' in str(e): raise ImportError('C extensions not built: if you installed already ' 'verify that you are not importing from the source ' 'directory') else: raise from pandas.version import version as __version__ from pandas.info import __doc__ from pandas.core.api import * from pandas.sparse.api import * from pandas.stats.api import * from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.io.parsers import read_csv, read_table, read_clipboard, ExcelFile, ExcelWriter from pandas.io.pytables import HDFStore from pandas.util.testing import debug from pandas.tools.merge import merge, concat from pandas.tools.pivot import pivot_table, crosstab from pandas.tools.describe import value_range
4193a49144227f8ff2694d602246d692ee9946bf
oauthenticator/__init__.py
oauthenticator/__init__.py
# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .generic import * from .google import * from .okpy import * from ._version import __version__, version_info
# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .google import * from .okpy import * from ._version import __version__, version_info
Delete extra import. Prepare for PR.
Delete extra import. Prepare for PR.
Python
bsd-3-clause
NickolausDS/oauthenticator,enolfc/oauthenticator,yuvipanda/mwoauthenticator,jupyter/oauthenticator,jupyter/oauthenticator,minrk/oauthenticator,yuvipanda/mwoauthenticator,jupyterhub/oauthenticator,maltevogl/oauthenticator
# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .generic import * from .google import * from .okpy import * from ._version import __version__, version_info Delete extra import. Prepare for PR.
# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .google import * from .okpy import * from ._version import __version__, version_info
<commit_before># include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .generic import * from .google import * from .okpy import * from ._version import __version__, version_info <commit_msg>Delete extra import. Prepare for PR.<commit_after>
# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .google import * from .okpy import * from ._version import __version__, version_info
# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .generic import * from .google import * from .okpy import * from ._version import __version__, version_info Delete extra import. Prepare for PR.# include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .google import * from .okpy import * from ._version import __version__, version_info
<commit_before># include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .generic import * from .google import * from .okpy import * from ._version import __version__, version_info <commit_msg>Delete extra import. Prepare for PR.<commit_after># include github, bitbucket, google here for backward-compatibility # don't add new oauthenticators here. from .oauth2 import * from .github import * from .bitbucket import * from .google import * from .okpy import * from ._version import __version__, version_info
f875fb3973fe5f99f2bc343d2685774bc388deb3
version.py
version.py
major = 0 minor=0 patch=0 branch="dev" timestamp=1376506569.35
major = 0 minor=0 patch=13 branch="master" timestamp=1376507610.99
Tag commit for v0.0.13-master generated by gitmake.py
Tag commit for v0.0.13-master generated by gitmake.py
Python
mit
ryansturmer/gitmake
major = 0 minor=0 patch=0 branch="dev" timestamp=1376506569.35Tag commit for v0.0.13-master generated by gitmake.py
major = 0 minor=0 patch=13 branch="master" timestamp=1376507610.99
<commit_before>major = 0 minor=0 patch=0 branch="dev" timestamp=1376506569.35<commit_msg>Tag commit for v0.0.13-master generated by gitmake.py<commit_after>
major = 0 minor=0 patch=13 branch="master" timestamp=1376507610.99
major = 0 minor=0 patch=0 branch="dev" timestamp=1376506569.35Tag commit for v0.0.13-master generated by gitmake.pymajor = 0 minor=0 patch=13 branch="master" timestamp=1376507610.99
<commit_before>major = 0 minor=0 patch=0 branch="dev" timestamp=1376506569.35<commit_msg>Tag commit for v0.0.13-master generated by gitmake.py<commit_after>major = 0 minor=0 patch=13 branch="master" timestamp=1376507610.99
0d1300165b2d33802124917d477047f7b414a69c
plugins/Tools/ScaleTool/ScaleToolHandle.py
plugins/Tools/ScaleTool/ScaleToolHandle.py
from UM.Scene.ToolHandle import ToolHandle class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) md = self.getMeshData() md.addVertex(0, 0, 0) md.addVertex(0, 20, 0) md.addVertex(0, 0, 0) md.addVertex(20, 0, 0) md.addVertex(0, 0, 0) md.addVertex(0, 0, 20)
from UM.Scene.ToolHandle import ToolHandle from UM.Mesh.MeshData import MeshData from UM.Mesh.MeshBuilder import MeshBuilder from UM.Math.Vector import Vector class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) lines = MeshData() lines.addVertex(0, 0, 0) lines.addVertex(0, 20, 0) lines.addVertex(0, 0, 0) lines.addVertex(20, 0, 0) lines.addVertex(0, 0, 0) lines.addVertex(0, 0, 20) lines.setVertexColor(0, ToolHandle.YAxisColor) lines.setVertexColor(1, ToolHandle.YAxisColor) lines.setVertexColor(2, ToolHandle.XAxisColor) lines.setVertexColor(3, ToolHandle.XAxisColor) lines.setVertexColor(4, ToolHandle.ZAxisColor) lines.setVertexColor(5, ToolHandle.ZAxisColor) self.setLineMesh(lines) mb = MeshBuilder() mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 0), color = ToolHandle.AllAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 20, 0), color = ToolHandle.YAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(20, 0, 0), color = ToolHandle.XAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 20), color = ToolHandle.ZAxisColor ) self.setSolidMesh(mb.getData())
Implement proper scale tool handles
Implement proper scale tool handles
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
from UM.Scene.ToolHandle import ToolHandle class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) md = self.getMeshData() md.addVertex(0, 0, 0) md.addVertex(0, 20, 0) md.addVertex(0, 0, 0) md.addVertex(20, 0, 0) md.addVertex(0, 0, 0) md.addVertex(0, 0, 20) Implement proper scale tool handles
from UM.Scene.ToolHandle import ToolHandle from UM.Mesh.MeshData import MeshData from UM.Mesh.MeshBuilder import MeshBuilder from UM.Math.Vector import Vector class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) lines = MeshData() lines.addVertex(0, 0, 0) lines.addVertex(0, 20, 0) lines.addVertex(0, 0, 0) lines.addVertex(20, 0, 0) lines.addVertex(0, 0, 0) lines.addVertex(0, 0, 20) lines.setVertexColor(0, ToolHandle.YAxisColor) lines.setVertexColor(1, ToolHandle.YAxisColor) lines.setVertexColor(2, ToolHandle.XAxisColor) lines.setVertexColor(3, ToolHandle.XAxisColor) lines.setVertexColor(4, ToolHandle.ZAxisColor) lines.setVertexColor(5, ToolHandle.ZAxisColor) self.setLineMesh(lines) mb = MeshBuilder() mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 0), color = ToolHandle.AllAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 20, 0), color = ToolHandle.YAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(20, 0, 0), color = ToolHandle.XAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 20), color = ToolHandle.ZAxisColor ) self.setSolidMesh(mb.getData())
<commit_before>from UM.Scene.ToolHandle import ToolHandle class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) md = self.getMeshData() md.addVertex(0, 0, 0) md.addVertex(0, 20, 0) md.addVertex(0, 0, 0) md.addVertex(20, 0, 0) md.addVertex(0, 0, 0) md.addVertex(0, 0, 20) <commit_msg>Implement proper scale tool handles<commit_after>
from UM.Scene.ToolHandle import ToolHandle from UM.Mesh.MeshData import MeshData from UM.Mesh.MeshBuilder import MeshBuilder from UM.Math.Vector import Vector class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) lines = MeshData() lines.addVertex(0, 0, 0) lines.addVertex(0, 20, 0) lines.addVertex(0, 0, 0) lines.addVertex(20, 0, 0) lines.addVertex(0, 0, 0) lines.addVertex(0, 0, 20) lines.setVertexColor(0, ToolHandle.YAxisColor) lines.setVertexColor(1, ToolHandle.YAxisColor) lines.setVertexColor(2, ToolHandle.XAxisColor) lines.setVertexColor(3, ToolHandle.XAxisColor) lines.setVertexColor(4, ToolHandle.ZAxisColor) lines.setVertexColor(5, ToolHandle.ZAxisColor) self.setLineMesh(lines) mb = MeshBuilder() mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 0), color = ToolHandle.AllAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 20, 0), color = ToolHandle.YAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(20, 0, 0), color = ToolHandle.XAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 20), color = ToolHandle.ZAxisColor ) self.setSolidMesh(mb.getData())
from UM.Scene.ToolHandle import ToolHandle class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) md = self.getMeshData() md.addVertex(0, 0, 0) md.addVertex(0, 20, 0) md.addVertex(0, 0, 0) md.addVertex(20, 0, 0) md.addVertex(0, 0, 0) md.addVertex(0, 0, 20) Implement proper scale tool handlesfrom UM.Scene.ToolHandle import ToolHandle from UM.Mesh.MeshData import MeshData from UM.Mesh.MeshBuilder import MeshBuilder from UM.Math.Vector import Vector class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) lines = MeshData() lines.addVertex(0, 0, 0) lines.addVertex(0, 20, 0) lines.addVertex(0, 0, 0) lines.addVertex(20, 0, 0) lines.addVertex(0, 0, 0) lines.addVertex(0, 0, 20) lines.setVertexColor(0, ToolHandle.YAxisColor) lines.setVertexColor(1, ToolHandle.YAxisColor) lines.setVertexColor(2, ToolHandle.XAxisColor) lines.setVertexColor(3, ToolHandle.XAxisColor) lines.setVertexColor(4, ToolHandle.ZAxisColor) lines.setVertexColor(5, ToolHandle.ZAxisColor) self.setLineMesh(lines) mb = MeshBuilder() mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 0), color = ToolHandle.AllAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 20, 0), color = ToolHandle.YAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(20, 0, 0), color = ToolHandle.XAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 20), color = ToolHandle.ZAxisColor ) self.setSolidMesh(mb.getData())
<commit_before>from UM.Scene.ToolHandle import ToolHandle class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) md = self.getMeshData() md.addVertex(0, 0, 0) md.addVertex(0, 20, 0) md.addVertex(0, 0, 0) md.addVertex(20, 0, 0) md.addVertex(0, 0, 0) md.addVertex(0, 0, 20) <commit_msg>Implement proper scale tool handles<commit_after>from UM.Scene.ToolHandle import ToolHandle from UM.Mesh.MeshData import MeshData from UM.Mesh.MeshBuilder import MeshBuilder from UM.Math.Vector import Vector class ScaleToolHandle(ToolHandle): def __init__(self, parent = None): super().__init__(parent) lines = MeshData() lines.addVertex(0, 0, 0) lines.addVertex(0, 20, 0) lines.addVertex(0, 0, 0) lines.addVertex(20, 0, 0) lines.addVertex(0, 0, 0) lines.addVertex(0, 0, 20) lines.setVertexColor(0, ToolHandle.YAxisColor) lines.setVertexColor(1, ToolHandle.YAxisColor) lines.setVertexColor(2, ToolHandle.XAxisColor) lines.setVertexColor(3, ToolHandle.XAxisColor) lines.setVertexColor(4, ToolHandle.ZAxisColor) lines.setVertexColor(5, ToolHandle.ZAxisColor) self.setLineMesh(lines) mb = MeshBuilder() mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 0), color = ToolHandle.AllAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 20, 0), color = ToolHandle.YAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(20, 0, 0), color = ToolHandle.XAxisColor ) mb.addCube( width = 2, height = 2, depth = 2, center = Vector(0, 0, 20), color = ToolHandle.ZAxisColor ) self.setSolidMesh(mb.getData())
13e86e405a3b7e2933a5f7fca14d7903f30201ee
Largest_Palindrome_Product.py
Largest_Palindrome_Product.py
# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number1 = "" number2 = "" for x in range(n): number1 += "9" number2 += "9" number1 = int(number1) number2 = int(number2) palindrome = 0 for x in range(number1 + 1): for i in range(number2 + 1): product = x * i if (str(product) == str(product)[::-1]) and product > palindrome: palindrome = product return palindrome % 1337 n = 2 print(largestPalindrome(n))
# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
Solve Largest Palindrome Product for range of n is [1,6]
Solve Largest Palindrome Product for range of n is [1,6]
Python
mit
Kunal57/Python_Algorithms
# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number1 = "" number2 = "" for x in range(n): number1 += "9" number2 += "9" number1 = int(number1) number2 = int(number2) palindrome = 0 for x in range(number1 + 1): for i in range(number2 + 1): product = x * i if (str(product) == str(product)[::-1]) and product > palindrome: palindrome = product return palindrome % 1337 n = 2 print(largestPalindrome(n))Solve Largest Palindrome Product for range of n is [1,6]
# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
<commit_before># Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number1 = "" number2 = "" for x in range(n): number1 += "9" number2 += "9" number1 = int(number1) number2 = int(number2) palindrome = 0 for x in range(number1 + 1): for i in range(number2 + 1): product = x * i if (str(product) == str(product)[::-1]) and product > palindrome: palindrome = product return palindrome % 1337 n = 2 print(largestPalindrome(n))<commit_msg>Solve Largest Palindrome Product for range of n is [1,6]<commit_after>
# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number1 = "" number2 = "" for x in range(n): number1 += "9" number2 += "9" number1 = int(number1) number2 = int(number2) palindrome = 0 for x in range(number1 + 1): for i in range(number2 + 1): product = x * i if (str(product) == str(product)[::-1]) and product > palindrome: palindrome = product return palindrome % 1337 n = 2 print(largestPalindrome(n))Solve Largest Palindrome Product for range of n is [1,6]# Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
<commit_before># Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number1 = "" number2 = "" for x in range(n): number1 += "9" number2 += "9" number1 = int(number1) number2 = int(number2) palindrome = 0 for x in range(number1 + 1): for i in range(number2 + 1): product = x * i if (str(product) == str(product)[::-1]) and product > palindrome: palindrome = product return palindrome % 1337 n = 2 print(largestPalindrome(n))<commit_msg>Solve Largest Palindrome Product for range of n is [1,6]<commit_after># Find the largest palindrome made from the product of two n-digit numbers. # Since the result could be very large, you should return the largest palindrome mod 1337. # Example: # Input: 2 # Output: 987 # Explanation: 99 x 91 = 9009, 9009 % 1337 = 987 # Note: # The range of n is [1,8]. def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
feb147bf3ed487a72128c3bbd3f5a0548c26933a
src/tests/program_page_test.py
src/tests/program_page_test.py
from lib.constants.test import create_new_program from lib import base, page class TestProgramPage(base.Test): def create_private_program_test(self): dashboard = page.dashboard.DashboardPage(self.driver) dashboard.navigate_to() lhn_menu = dashboard.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close()
from lib.constants.test import create_new_program from lib.page import dashboard from lib import base class TestProgramPage(base.Test): def create_private_program_test(self): dashboard_page = dashboard.DashboardPage(self.driver) dashboard_page.navigate_to() lhn_menu = dashboard_page.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close()
Fix import error for program page test
Fix import error for program page test
Python
apache-2.0
VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core
from lib.constants.test import create_new_program from lib import base, page class TestProgramPage(base.Test): def create_private_program_test(self): dashboard = page.dashboard.DashboardPage(self.driver) dashboard.navigate_to() lhn_menu = dashboard.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close() Fix import error for program page test
from lib.constants.test import create_new_program from lib.page import dashboard from lib import base class TestProgramPage(base.Test): def create_private_program_test(self): dashboard_page = dashboard.DashboardPage(self.driver) dashboard_page.navigate_to() lhn_menu = dashboard_page.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close()
<commit_before>from lib.constants.test import create_new_program from lib import base, page class TestProgramPage(base.Test): def create_private_program_test(self): dashboard = page.dashboard.DashboardPage(self.driver) dashboard.navigate_to() lhn_menu = dashboard.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close() <commit_msg>Fix import error for program page test<commit_after>
from lib.constants.test import create_new_program from lib.page import dashboard from lib import base class TestProgramPage(base.Test): def create_private_program_test(self): dashboard_page = dashboard.DashboardPage(self.driver) dashboard_page.navigate_to() lhn_menu = dashboard_page.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close()
from lib.constants.test import create_new_program from lib import base, page class TestProgramPage(base.Test): def create_private_program_test(self): dashboard = page.dashboard.DashboardPage(self.driver) dashboard.navigate_to() lhn_menu = dashboard.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close() Fix import error for program page testfrom lib.constants.test import create_new_program from lib.page import dashboard from lib import base class TestProgramPage(base.Test): def create_private_program_test(self): dashboard_page = dashboard.DashboardPage(self.driver) dashboard_page.navigate_to() lhn_menu = dashboard_page.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close()
<commit_before>from lib.constants.test import create_new_program from lib import base, page class TestProgramPage(base.Test): def create_private_program_test(self): dashboard = page.dashboard.DashboardPage(self.driver) dashboard.navigate_to() lhn_menu = dashboard.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close() <commit_msg>Fix import error for program page test<commit_after>from lib.constants.test import create_new_program from lib.page import dashboard from lib import base class TestProgramPage(base.Test): def create_private_program_test(self): dashboard_page = dashboard.DashboardPage(self.driver) dashboard_page.navigate_to() lhn_menu = dashboard_page.open_lhn_menu() lhn_menu.select_all_objects() program_dropdown = lhn_menu.open_programs() new_program_page = program_dropdown.open_create_new_program() new_program_page.enter_title(create_new_program.TEST_TITLE) new_program_page.enter_description(create_new_program.TEST_DESCRIPTION) new_program_page.enter_notes(create_new_program.TEST_NOTES) new_program_page.enter_code(create_new_program.TEST_CODE) new_program_page.checkbox_check_private_program() new_program_page.enter_primary_contact( create_new_program.TEST_PRIMARY_CONTACT ) new_program_page.enter_secondary_contact( create_new_program.TEST_SECONDARY_CONTACT ) new_program_page.enter_program_url(create_new_program.TEST_PROGRAM_URL) new_program_page.enter_reference_url( create_new_program.TEST_REFERENCE_URL ) new_program_page.enter_effective_date_start_month() new_program_page.enter_stop_date_end_month() new_program_page.save_and_close()
2bddcc714670f0a5b54c9f0ee021f04f061f9325
tests/test__utils.py
tests/test__utils.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2, 1,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2, 1,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
Check N-D with N>2 raises in utils
Check N-D with N>2 raises in utils Permit 1-D and 2-D arrays to be handled by internal utilities.
Python
bsd-3-clause
jakirkham/dask-distance
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all() Check N-D with N>2 raises in utils Permit 1-D and 2-D arrays to be handled by internal utilities.
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2, 1,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2, 1,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all() <commit_msg>Check N-D with N>2 raises in utils Permit 1-D and 2-D arrays to be handled by internal utilities.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2, 1,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2, 1,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all() Check N-D with N>2 raises in utils Permit 1-D and 2-D arrays to be handled by internal utilities.#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2, 1,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2, 1,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all() <commit_msg>Check N-D with N>2 raises in utils Permit 1-D and 2-D arrays to be handled by internal utilities.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest import numpy as np import dask_distance._utils @pytest.mark.parametrize("et, u, v", [ (ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)), (ValueError, np.zeros((1, 2, 1,), dtype=bool), np.zeros((2,), dtype=bool)), (ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2, 1,), dtype=bool)), ]) def test__bool_cmp_mtx_cnt_err(et, u, v): with pytest.raises(et): dask_distance._utils._bool_cmp_mtx_cnt(u, v) def test__bool_cmp_mtx_cnt(): u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool) v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool) uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v) uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float) assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
7c02cf4321677988473d78e56e18e1f96061f356
src/qraz/urls.py
src/qraz/urls.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', 'qraz.frontend.views.handle404'), ] handler404 = 'qraz.frontend.views.handle404'
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin from qraz.frontend.views import handle404 urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', handle404), ] handler404 = handle404
Fix deprecated view method passing.
Fix deprecated view method passing.
Python
bsd-2-clause
fladi/qraz,fladi/qraz,fladi/qraz
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', 'qraz.frontend.views.handle404'), ] handler404 = 'qraz.frontend.views.handle404' Fix deprecated view method passing.
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin from qraz.frontend.views import handle404 urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', handle404), ] handler404 = handle404
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', 'qraz.frontend.views.handle404'), ] handler404 = 'qraz.frontend.views.handle404' <commit_msg>Fix deprecated view method passing.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin from qraz.frontend.views import handle404 urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', handle404), ] handler404 = handle404
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', 'qraz.frontend.views.handle404'), ] handler404 = 'qraz.frontend.views.handle404' Fix deprecated view method passing.#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin from qraz.frontend.views import handle404 urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', handle404), ] handler404 = handle404
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', 'qraz.frontend.views.handle404'), ] handler404 = 'qraz.frontend.views.handle404' <commit_msg>Fix deprecated view method passing.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.conf import settings from django.conf.urls import include, url from django.contrib import admin from qraz.frontend.views import handle404 urlpatterns = [ url( r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework' ) ), url( r'^oauth2/', include( 'oauth2_provider.urls', namespace='oauth2_provider' ) ), url( r'^admin/', include(admin.site.urls) ), url( r'^', include('social.apps.django_app.urls', namespace='social') ), url( r'^', include('qraz.frontend.urls', namespace='qraz') ), ] if settings.DEBUG: urlpatterns += [ url(r'^404$', handle404), ] handler404 = handle404
675178e41719da258941449f8457a6500c43ad9a
tool/zeroconf_ssh.py
tool/zeroconf_ssh.py
#!/usr/bin/python import socket import time from zeroconf import * def main(): print "Register SSH service ..." service_type = "_ssh._tcp.local." info = ServiceInfo(service_type, "RPi3." + service_type, socket.inet_aton("127.0.0.1"), 22, 0, 0, "", None) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main()
#!/usr/bin/python import socket import time from zeroconf import * def main(): service_type = "_ssh._tcp.local." service_port = 22 service_addr = socket.gethostbyname(socket.gethostname()) service_name = socket.gethostname().replace('.local', '.') info = ServiceInfo(service_type, service_name + service_type, socket.inet_aton(service_addr), service_port, 0, 0, "", None) print "Register SSH service %s on %s ..." % (socket.gethostname(), service_addr) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main()
Support use real ip and host name for service
Support use real ip and host name for service
Python
apache-2.0
TimonLio/rex-pi,TimonLio/rex-pi
#!/usr/bin/python import socket import time from zeroconf import * def main(): print "Register SSH service ..." service_type = "_ssh._tcp.local." info = ServiceInfo(service_type, "RPi3." + service_type, socket.inet_aton("127.0.0.1"), 22, 0, 0, "", None) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main() Support use real ip and host name for service
#!/usr/bin/python import socket import time from zeroconf import * def main(): service_type = "_ssh._tcp.local." service_port = 22 service_addr = socket.gethostbyname(socket.gethostname()) service_name = socket.gethostname().replace('.local', '.') info = ServiceInfo(service_type, service_name + service_type, socket.inet_aton(service_addr), service_port, 0, 0, "", None) print "Register SSH service %s on %s ..." % (socket.gethostname(), service_addr) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main()
<commit_before>#!/usr/bin/python import socket import time from zeroconf import * def main(): print "Register SSH service ..." service_type = "_ssh._tcp.local." info = ServiceInfo(service_type, "RPi3." + service_type, socket.inet_aton("127.0.0.1"), 22, 0, 0, "", None) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main() <commit_msg>Support use real ip and host name for service<commit_after>
#!/usr/bin/python import socket import time from zeroconf import * def main(): service_type = "_ssh._tcp.local." service_port = 22 service_addr = socket.gethostbyname(socket.gethostname()) service_name = socket.gethostname().replace('.local', '.') info = ServiceInfo(service_type, service_name + service_type, socket.inet_aton(service_addr), service_port, 0, 0, "", None) print "Register SSH service %s on %s ..." % (socket.gethostname(), service_addr) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main()
#!/usr/bin/python import socket import time from zeroconf import * def main(): print "Register SSH service ..." service_type = "_ssh._tcp.local." info = ServiceInfo(service_type, "RPi3." + service_type, socket.inet_aton("127.0.0.1"), 22, 0, 0, "", None) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main() Support use real ip and host name for service#!/usr/bin/python import socket import time from zeroconf import * def main(): service_type = "_ssh._tcp.local." service_port = 22 service_addr = socket.gethostbyname(socket.gethostname()) service_name = socket.gethostname().replace('.local', '.') info = ServiceInfo(service_type, service_name + service_type, socket.inet_aton(service_addr), service_port, 0, 0, "", None) print "Register SSH service %s on %s ..." % (socket.gethostname(), service_addr) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main()
<commit_before>#!/usr/bin/python import socket import time from zeroconf import * def main(): print "Register SSH service ..." service_type = "_ssh._tcp.local." info = ServiceInfo(service_type, "RPi3." + service_type, socket.inet_aton("127.0.0.1"), 22, 0, 0, "", None) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main() <commit_msg>Support use real ip and host name for service<commit_after>#!/usr/bin/python import socket import time from zeroconf import * def main(): service_type = "_ssh._tcp.local." service_port = 22 service_addr = socket.gethostbyname(socket.gethostname()) service_name = socket.gethostname().replace('.local', '.') info = ServiceInfo(service_type, service_name + service_type, socket.inet_aton(service_addr), service_port, 0, 0, "", None) print "Register SSH service %s on %s ..." % (socket.gethostname(), service_addr) zc = Zeroconf() zc.register_service(info) try: while True: time.sleep(1) except KeyboardInterrupt: pass finally: print("Unregistering ...") zc.unregister_service(info) zc.close() if __name__ == '__main__': main()
72ca8573f97c5950a7d6a885be295fddb34cb088
recipyGui/views.py
recipyGui/views.py
from flask import Blueprint, request, redirect, render_template, url_for from flask.views import MethodView from recipyGui import recipyGui, mongo from forms import SearchForm runs = Blueprint('runs', __name__, template_folder='templates') @recipyGui.route('/') def index(): form = SearchForm() query = request.args.get('query', '') if not query: runs = [r for r in mongo.db.recipies.find({})] else: # TODO: search runs using the query string runs = [] print 'runs:', runs print 'query:', query return render_template('runs/list.html', runs=runs, query=query, form=form) #class ListView(MethodView): # def get(self): # runs = Run.objects.all() # print runs # return render_template('runs/list.html', runs=runs) # Register urls #runs.add_url_rule('/', view_func=ListView.as_view('list'))
from flask import Blueprint, request, redirect, render_template, url_for from flask.views import MethodView from recipyGui import recipyGui, mongo from forms import SearchForm runs = Blueprint('runs', __name__, template_folder='templates') @recipyGui.route('/') def index(): form = SearchForm() query = request.args.get('query', '') if not query: # Return all runs, ordered by date (oldest run first) runs = [r for r in mongo.db.recipies.find({}).sort('date', -1)] else: # Search runs using the query string q = { '$text': { '$search': query} } runs = [r for r in mongo.db.recipies.find(q)] print 'runs:', runs print 'query:', query return render_template('runs/list.html', runs=runs, query=query, form=form) #class ListView(MethodView): # def get(self): # runs = Run.objects.all() # print runs # return render_template('runs/list.html', runs=runs) # Register urls #runs.add_url_rule('/', view_func=ListView.as_view('list'))
Add search functionality for runs
Add search functionality for runs

Add search functionality for runs. Runs are searched using MongoDB's
full text search capabilities. Note that this requires a text index on
the collection (command used to create the text index:
db.recipies.createIndex({ "$**": "text" }, { name: "TextIndex" },
{language: "none"}) )

Closes #2
Python
apache-2.0
musically-ut/recipy,github4ry/recipy,bsipocz/recipy,MichielCottaar/recipy,recipy/recipy-gui,MichielCottaar/recipy,bsipocz/recipy,MBARIMike/recipy,github4ry/recipy,musically-ut/recipy,MBARIMike/recipy,recipy/recipy,recipy/recipy,recipy/recipy-gui
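The commit message quotes the mongo shell command for the text index this search depends on. For completeness, a sketch of creating the same wildcard text index from PyMongo; the database name and the local connection are assumptions, not taken from the repository:

import pymongo

client = pymongo.MongoClient()  # assumes a local mongod
db = client['recipyGui']        # database name is an assumption
# Wildcard text index over all string fields, mirroring the quoted shell command.
db.recipies.create_index([('$**', pymongo.TEXT)],
                         name='TextIndex',
                         default_language='none')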
dfce3efecacfa53654e06ea9be94407155fe7e4c
airship/__init__.py
airship/__init__.py
import os
import json

from flask import Flask, render_template


def channels_json(station, escaped=False):
    channels = [{"name": channel} for channel in station.channels()]
    jsonbody = json.dumps(channels)
    if escaped:
        jsonbody = jsonbody.replace("</", "<\\/")
    return jsonbody


def make_airship(station):
    app = Flask(__name__)

    @app.route("/")
    def index():
        return render_template("index.html",
                               channels_json=channels_json(station, True))

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    return app
import os
import json

from flask import Flask, render_template


def channels_json(station, escaped=False):
    channels = [{"name": channel} for channel in station.channels()]
    jsonbody = json.dumps(channels)
    if escaped:
        jsonbody = jsonbody.replace("</", "<\\/")
    return jsonbody


def make_airship(station):
    app = Flask(__name__)

    @app.route("/")
    def index():
        return render_template("index.html",
                               channels_json=channels_json(station, True))

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    @app.route("/grefs/<channel>")
    def list_grefs(channel):
        return

    return app
Create a route for fetching grefs
Create a route for fetching grefs
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
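The new /grefs/<channel> route is deliberately left as a stub. As an illustration of how Flask's variable rule binds the URL segment, independent of groundstation's actual gref API (which this commit does not show), a hypothetical filled-in handler might look like:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/grefs/<channel>")
def list_grefs(channel):
    # <channel> in the rule arrives as the function argument.
    return jsonify(channel=channel, grefs=[])  # gref lookup itself is hypothetical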
3523501d5d8fdd6451dedf59258392872ba9af09
ignition/__init__.py
ignition/__init__.py
""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import * import flame import int_gen import riemann
""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import *
Remove languages from default import to stop hard requirement on Mako
Remove languages from default import to stop hard requirement on Mako
Python
bsd-3-clause
IgnitionProject/ignition,IgnitionProject/ignition,IgnitionProject/ignition
""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import * import flame import int_gen import riemann Remove languages from default import to stop hard requirement on Mako
""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import *
<commit_before>""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import * import flame import int_gen import riemann <commit_msg>Remove languages from default import to stop hard requirement on Mako<commit_after>
""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import *
""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import * import flame import int_gen import riemann Remove languages from default import to stop hard requirement on Mako""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import *
<commit_before>""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import * import flame import int_gen import riemann <commit_msg>Remove languages from default import to stop hard requirement on Mako<commit_after>""" Ignition is a numerical code generator. See README in top level for more details """ __version__ = "0.0.1-git" def __ignition_debug(): # helper function so we don't import os globally import os return eval(os.getenv('IGNITION_DEBUG', 'False')) IGNITION_DEBUG = __ignition_debug() from utils import *
c83c63a6b1ff1cc6d6d4f71f2da3affbb167738d
tabler/tabler.py
tabler/tabler.py
from parser import TableParser

class Tabler:
    def __init__(self, html):
        self._html = html
        self.header = []
        self.body = []
        self.footer = []
        self.parser = TableParser(self.add_header_row, self.add_body_row,
                                  self.add_footer_row)
        self.parser.feed(html)

    def rows(self):
        return self.header + self.body + self.footer

    def add_header_row(self, cells):
        self.header.append(cells)

    def add_body_row(self, cells):
        self.body.append(cells)

    def add_footer_row(self, cells):
        self.footer.append(cells)
from parser import TableParser

class Tabler:
    def __init__(self, html):
        self._html = html
        self.header = []
        self.body = []
        self.footer = []
        self.parser = TableParser(self.add_header_row, self.add_body_row,
                                  self.add_footer_row)
        self.parser.feed(html)

    def rows(self):
        return self.header + self.body + self.footer

    def body_rows(self):
        return self.body

    def add_header_row(self, cells):
        self.header.append(self.Row(self, cells))

    def add_body_row(self, cells):
        self.body.append(self.Row(self, cells))

    def add_footer_row(self, cells):
        self.footer.append(self.Row(self, cells))

    def index_of(self, index):
        if isinstance(index, str):
            if len(self.header) > 0:
                try:
                    return self.header[0].index(index)
                except ValueError:
                    raise ValueError(index + " is not a valid index value.")
            raise ValueError(index + " is not a valid index value.")
        return index

    class Row:
        def __init__(self, tabler, cells):
            self._tabler = tabler
            self._cells = cells

        def __getitem__(self, index):
            return self._cells[self._tabler.index_of(index)]

        def index(self, elt):
            return self._cells.index(elt)
Use a class to store row data, and allow lookup via index or header cell value.
Use a class to store row data, and allow lookup via index or header cell value.
Python
bsd-3-clause
bschmeck/tabler
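A usage sketch of the header-based lookup this commit adds. The HTML snippet is illustrative, and it assumes TableParser routes <th> rows to the header callback:

html = ("<table><tr><th>Name</th><th>Score</th></tr>"
        "<tr><td>Ada</td><td>10</td></tr></table>")

table = Tabler(html)
row = table.body_rows()[0]
print row[0]        # 'Ada'  - positional lookup
print row['Score']  # '10'   - header-cell lookup via Tabler.index_of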
34f3d2d8c7828036deaeae7a78d33de2412759e3
migrant.py
migrant.py
import argparse
import binascii
import datetime
import gzip
import json
import magic
import os
import pymongo
import sys


def read_gzip(filename):
    with gzip.open(filename) as file:
        content = file.read()
    return content


def read_plain(filename):
    with open(filename) as file:
        content = file.read()
    return content


readers = {
    b'application/x-gzip': read_gzip,
    b'text/plain': read_plain,
}


def read(filename):
    type = magic.from_file(filename, mime=True)
    return readers[type](filename).decode()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', help='path to moita configuration file',
                        dest='moita', metavar='MOITA', required=True)
    parser.add_argument('filename', nargs='+')
    args = parser.parse_args()

    sys.path.append(os.path.dirname(args.moita))
    import config

    connection = pymongo.MongoClient()
    collection = connection[config.DATABASE].timetables

    for file in args.filename:
        content = json.loads(read(file))

        identifier = binascii.unhexlify(
            os.path.basename(file).split('.', 1)[0]).decode()
        content['_id'] = identifier

        mtime = datetime.datetime.fromtimestamp(os.path.getmtime(file))
        content['updated_at'] = mtime

        collection.save(content)
#!/usr/bin/env python
import argparse
import binascii
import datetime
import gzip
import json
import magic
import os
import pymongo
import sys


def read_gzip(filename):
    with gzip.open(filename) as file:
        content = file.read()
    return content


def read_plain(filename):
    with open(filename) as file:
        content = file.read()
    return content


readers = {
    b'application/x-gzip': read_gzip,
    b'text/plain': read_plain,
}


def read(filename):
    type = magic.from_file(filename, mime=True)
    return readers[type](filename).decode()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', help='path to moita configuration file',
                        dest='moita', metavar='MOITA', required=True)
    parser.add_argument('filename', nargs='+')
    args = parser.parse_args()

    sys.path.append(os.path.dirname(args.moita))
    import config

    connection = pymongo.MongoClient()
    collection = connection[config.DATABASE].timetables

    for file in args.filename:
        content = json.loads(read(file))

        identifier = binascii.unhexlify(
            os.path.basename(file).split('.', 1)[0]).decode()
        content['_id'] = identifier

        mtime = datetime.datetime.fromtimestamp(os.path.getmtime(file))
        content['updated_at'] = mtime

        collection.save(content)
Add shebang and make file executable
Add shebang and make file executable
Python
mit
ranisalt/moita-migrant
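The second half of this commit, making the file executable, happens outside the file itself. Scripted in Python for illustration, equivalent to `chmod +x migrant.py`:

import os
import stat

st = os.stat('migrant.py')
os.chmod('migrant.py', st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)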
c44be6418bbf92121e56bf68d6c8e2ebef483e17
script/generate_amalgamation.py
script/generate_amalgamation.py
#!/usr/bin/env python

import sys
import os.path
import re

INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')

seen_files = set()


def add_file(filename):
    basename = os.path.basename(filename)

    # Only include each file at most once.
    if basename in seen_files:
        return

    seen_files.add(basename)
    path = os.path.dirname(filename)

    with open(filename, 'r') as f:
        for line in f:
            m = INCLUDE_PATTERN.match(line)
            if m:
                add_file(os.path.join(path, m.group(1)))
            else:
                sys.stdout.write(line)


for f in sys.argv[1:]:
    add_file(f)
#!/usr/bin/env python

import sys
from os.path import basename, dirname, join
import re

INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"')

seen_files = set()

out = sys.stdout


def add_file(filename):
    bname = basename(filename)

    # Only include each file at most once.
    if bname in seen_files:
        return

    seen_files.add(bname)
    path = dirname(filename)

    out.write('// Begin file "{0}"\n'.format(filename))
    with open(filename, 'r') as f:
        for line in f:
            m = INCLUDE_PATTERN.match(line)
            if m:
                add_file(join(path, m.group(1)))
            else:
                out.write(line)
    out.write('// End file "{0}"\n'.format(filename))


for f in sys.argv[1:]:
    add_file(f)
Add comments for file start/end
Add comments for file start/end
Python
mit
Nave-Neel/wren,Nave-Neel/wren,minirop/wren,bigdimboom/wren,foresterre/wren,Nelarius/wren,minirop/wren,Nave-Neel/wren,foresterre/wren,Rohansi/wren,Rohansi/wren,minirop/wren,foresterre/wren,Nave-Neel/wren,minirop/wren,foresterre/wren,foresterre/wren,munificent/wren,bigdimboom/wren,Rohansi/wren,Nelarius/wren,Nelarius/wren,Nelarius/wren,Nelarius/wren,munificent/wren,munificent/wren,minirop/wren,munificent/wren,munificent/wren,bigdimboom/wren,bigdimboom/wren,munificent/wren,Rohansi/wren
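Because the script streams the merged source to stdout, callers typically redirect it to a file. A usage sketch; the file names are illustrative, not taken from the build scripts:

import subprocess

with open('wren_amalgamated.c', 'wb') as merged:
    subprocess.check_call(
        ['python', 'script/generate_amalgamation.py', 'src/vm/wren_vm.c'],
        stdout=merged)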
76491e672d648c794b0e2cd538b85de763f5f137
test/io/testcjsonwriter.py
test/io/testcjsonwriter.py
# This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2015-2016, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.

"""Unit tests for writer cjsonwriter module."""

import os
import unittest
import json

import cclib

__filedir__ = os.path.dirname(__file__)
__filepath__ = os.path.realpath(__filedir__)
__datadir__ = os.path.join(__filepath__, "..", "..")


class CJSONTest(unittest.TestCase):

    def setUp(self):
        self.CJSON = cclib.io.CJSON

    def test_init(self):
        """Does the class initialize correctly?"""
        fpath = os.path.join(__datadir__,
                             "data/ADF/basicADF2007.01/dvb_gopt.adfout")
        data = cclib.io.ccopen(fpath).parse()
        cjson = cclib.io.cjsonwriter.CJSON(data)

        # The object should keep the ccData instance passed to its constructor.
        self.assertEqual(cjson.ccdata, data)

    def test_cjson_generation(self):
        """Does the CJSON format get dumped properly"""
        fpath = os.path.join(__datadir__,
                             "data/ADF/basicADF2007.01/dvb_gopt.adfout")
        data = cclib.io.ccopen(fpath).parse()
        cjson = cclib.io.cjsonwriter.CJSON(data).generate_repr()

        # if the cjson is generated properly, the data available in the cjson and ccdata
        # object should be same
        json_data = json.loads(cjson)
        number_of_atoms = json_data['properties']['number of atoms']
        self.assertEqual(number_of_atoms, data.natom)


if __name__ == "__main__":
    unittest.main()
Test suite for CJSON writer
Test suite for CJSON writer
Python
bsd-3-clause
ATenderholt/cclib,Schamnad/cclib,langner/cclib,langner/cclib,berquist/cclib,cclib/cclib,cclib/cclib,langner/cclib,gaursagar/cclib,gaursagar/cclib,cclib/cclib,Schamnad/cclib,berquist/cclib,ATenderholt/cclib,berquist/cclib
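The suite uses stock unittest, so it can also be driven programmatically. A minimal sketch; the dotted module path is an assumption about the test tree layout:

import unittest

suite = unittest.defaultTestLoader.loadTestsFromName('test.io.testcjsonwriter')
unittest.TextTestRunner(verbosity=2).run(suite)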
5121ee36422681c695be5cd11682d11e8ba4ea0d
src/lavatory/utils/get_artifactory_info.py
src/lavatory/utils/get_artifactory_info.py
"""Helper method for getting artifactory information.""" from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() return storage_info, keys
"""Helper method for getting artifactory information.""" import logging from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() logging.debug('Storage info: %s', storage_info) logging.debug('Keys: %s', keys) return storage_info, keys
Add logging to helper method.
Add logging to helper method.
Python
apache-2.0
gogoair/lavatory
"""Helper method for getting artifactory information.""" from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() return storage_info, keys Add logging to helper method.
"""Helper method for getting artifactory information.""" import logging from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() logging.debug('Storage info: %s', storage_info) logging.debug('Keys: %s', keys) return storage_info, keys
<commit_before>"""Helper method for getting artifactory information.""" from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() return storage_info, keys <commit_msg>Add logging to helper method.<commit_after>
"""Helper method for getting artifactory information.""" import logging from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() logging.debug('Storage info: %s', storage_info) logging.debug('Keys: %s', keys) return storage_info, keys
"""Helper method for getting artifactory information.""" from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() return storage_info, keys Add logging to helper method."""Helper method for getting artifactory information.""" import logging from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() logging.debug('Storage info: %s', storage_info) logging.debug('Keys: %s', keys) return storage_info, keys
<commit_before>"""Helper method for getting artifactory information.""" from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() return storage_info, keys <commit_msg>Add logging to helper method.<commit_after>"""Helper method for getting artifactory information.""" import logging from .artifactory import Artifactory def get_artifactory_info(repo_name=None, repo_type='local'): """Get storage info from Artifactory. Args: repo_name (tuple, optional): Name of artifactory repo. repo_type (str): Type of artifactory repo. Returns: keys (dict, optional): Dictionary of repo data. storage_info (dict): Storage information api call. """ artifactory = Artifactory(repo_name=repo_name) storage_info = artifactory.list(repo_type=repo_type) if repo_name: keys = repo_name else: keys = storage_info.keys() logging.debug('Storage info: %s', storage_info) logging.debug('Keys: %s', keys) return storage_info, keys
9fc53690c8b31fa62391aeec54b29f4ee216402a
test/test_label_install.py
test/test_label_install.py
import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
Revert "Skip install labels test for now"
Revert "Skip install labels test for now" This reverts commit 3016324f9eb84989bcdefa2d3dfe1f766f4ab7e6.
Python
mit
robinedwards/neomodel,robinedwards/neomodel
import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE") Revert "Skip install labels test for now" This reverts commit 3016324f9eb84989bcdefa2d3dfe1f766f4ab7e6.
from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
<commit_before>import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE") <commit_msg>Revert "Skip install labels test for now" This reverts commit 3016324f9eb84989bcdefa2d3dfe1f766f4ab7e6.<commit_after>
from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE") Revert "Skip install labels test for now" This reverts commit 3016324f9eb84989bcdefa2d3dfe1f766f4ab7e6.from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
<commit_before>import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE") <commit_msg>Revert "Skip install labels test for now" This reverts commit 3016324f9eb84989bcdefa2d3dfe1f766f4ab7e6.<commit_after>from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
cc42cf63bc3bf887933635e824cc838204738e30
tests/acceptance/shared.py
tests/acceptance/shared.py
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
Fix acceptance tests: wait for button to be visible
Fix acceptance tests: wait for button to be visible
Python
agpl-3.0
xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first Fix acceptance tests: for for button to be visible
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
<commit_before>"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first <commit_msg>Fix acceptance tests: for for button to be visible<commit_after>
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first Fix acceptance tests: for for button to be visible"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
<commit_before>"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first <commit_msg>Fix acceptance tests: for for button to be visible<commit_after>"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
739018911befdb6804f26bc1a99dba6faa1313b7
mezzanine/core/auth_backends.py
mezzanine/core/auth_backends.py
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                if 'uidb36' not in kwargs:
                    return
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user
Fix kwargs usage to work with other auth backends.
Fix kwargs usage to work with other auth backends.
Python
bsd-2-clause
ryneeverett/mezzanine,nikolas/mezzanine,tuxinhang1989/mezzanine,dustinrb/mezzanine,sjuxax/mezzanine,emile2016/mezzanine,orlenko/sfpirg,Kniyl/mezzanine,webounty/mezzanine,molokov/mezzanine,spookylukey/mezzanine,Skytorn86/mezzanine,christianwgd/mezzanine,jjz/mezzanine,agepoly/mezzanine,jjz/mezzanine,tuxinhang1989/mezzanine,PegasusWang/mezzanine,theclanks/mezzanine,adrian-the-git/mezzanine,SoLoHiC/mezzanine,adrian-the-git/mezzanine,agepoly/mezzanine,stephenmcd/mezzanine,SoLoHiC/mezzanine,Kniyl/mezzanine,dsanders11/mezzanine,theclanks/mezzanine,orlenko/plei,stbarnabas/mezzanine,damnfine/mezzanine,gbosh/mezzanine,dovydas/mezzanine,scarcry/snm-mezzanine,promil23/mezzanine,christianwgd/mezzanine,frankchin/mezzanine,industrydive/mezzanine,emile2016/mezzanine,mush42/mezzanine,Skytorn86/mezzanine,sjdines/mezzanine,joshcartme/mezzanine,batpad/mezzanine,saintbird/mezzanine,dovydas/mezzanine,saintbird/mezzanine,ZeroXn/mezzanine,promil23/mezzanine,biomassives/mezzanine,molokov/mezzanine,joshcartme/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,tuxinhang1989/mezzanine,stephenmcd/mezzanine,Kniyl/mezzanine,jjz/mezzanine,readevalprint/mezzanine,wyzex/mezzanine,nikolas/mezzanine,AlexHill/mezzanine,adrian-the-git/mezzanine,gradel/mezzanine,orlenko/plei,SoLoHiC/mezzanine,gradel/mezzanine,vladir/mezzanine,douglaskastle/mezzanine,orlenko/sfpirg,stbarnabas/mezzanine,frankier/mezzanine,fusionbox/mezzanine,sjdines/mezzanine,molokov/mezzanine,wrwrwr/mezzanine,promil23/mezzanine,Skytorn86/mezzanine,ryneeverett/mezzanine,Cajoline/mezzanine,mush42/mezzanine,jerivas/mezzanine,dustinrb/mezzanine,saintbird/mezzanine,viaregio/mezzanine,theclanks/mezzanine,sjdines/mezzanine,cccs-web/mezzanine,wbtuomela/mezzanine,industrydive/mezzanine,gradel/mezzanine,agepoly/mezzanine,vladir/mezzanine,frankier/mezzanine,viaregio/mezzanine,webounty/mezzanine,jerivas/mezzanine,douglaskastle/mezzanine,sjuxax/mezzanine,fusionbox/mezzanine,eino-makitalo/mezzanine,ZeroXn/mezzanine,industrydive/mezzanine,dovydas/mezzanine,dekomote/mezzanine-modeltranslation-backport,joshcartme/mezzanine,frankchin/mezzanine,orlenko/plei,scarcry/snm-mezzanine,PegasusWang/mezzanine,PegasusWang/mezzanine,Cajoline/mezzanine,ZeroXn/mezzanine,orlenko/sfpirg,geodesign/mezzanine,dekomote/mezzanine-modeltranslation-backport,dsanders11/mezzanine,spookylukey/mezzanine,dsanders11/mezzanine,eino-makitalo/mezzanine,viaregio/mezzanine,dustinrb/mezzanine,frankier/mezzanine,ryneeverett/mezzanine,damnfine/mezzanine,wrwrwr/mezzanine,sjuxax/mezzanine,geodesign/mezzanine,wbtuomela/mezzanine,biomassives/mezzanine,biomassives/mezzanine,nikolas/mezzanine,Cicero-Zhao/mezzanine,geodesign/mezzanine,wyzex/mezzanine,AlexHill/mezzanine,wbtuomela/mezzanine,christianwgd/mezzanine,mush42/mezzanine,readevalprint/mezzanine,webounty/mezzanine,batpad/mezzanine,stephenmcd/mezzanine,guibernardino/mezzanine,douglaskastle/mezzanine,spookylukey/mezzanine,emile2016/mezzanine,vladir/mezzanine,eino-makitalo/mezzanine,Cajoline/mezzanine,dekomote/mezzanine-modeltranslation-backport,damnfine/mezzanine,wyzex/mezzanine,gbosh/mezzanine,cccs-web/mezzanine,readevalprint/mezzanine,frankchin/mezzanine,gbosh/mezzanine,guibernardino/mezzanine,scarcry/snm-mezzanine
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user

Fix kwargs usage to work with other auth backends.
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                if 'uidb36' not in kwargs:
                    return
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user
<commit_before>
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user

<commit_msg>Fix kwargs usage to work with other auth backends.<commit_after>
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                if 'uidb36' not in kwargs:
                    return
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user

Fix kwargs usage to work with other auth backends.
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                if 'uidb36' not in kwargs:
                    return
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user
<commit_before>
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user

<commit_msg>Fix kwargs usage to work with other auth backends.<commit_after>
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.utils.http import base36_to_int


class MezzanineBackend(ModelBackend):
    """
    Extends Django's ``ModelBackend`` to allow login via username, email,
    or verification token.

    Args are either ``username`` and ``password``, or ``uidb36``
    and ``token``. In either case, ``is_active`` can also be given.

    For login, is_active is not given, so that the login form can
    raise a specific error for inactive users.
    For password reset, True is given for is_active.
    For signup verification, False is given for is_active.
    """

    def authenticate(self, **kwargs):
        if kwargs:
            username = kwargs.pop("username", None)
            if username:
                username_or_email = Q(username=username) | Q(email=username)
                password = kwargs.pop("password")
                try:
                    user = User.objects.get(username_or_email, **kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if user.check_password(password):
                        return user
            else:
                if 'uidb36' not in kwargs:
                    return
                kwargs["id"] = base36_to_int(kwargs.pop("uidb36"))
                token = kwargs.pop("token")
                try:
                    user = User.objects.get(**kwargs)
                except User.DoesNotExist:
                    pass
                else:
                    if default_token_generator.check_token(user, token):
                        return user
869875f26dee4317712afa22f9d13befa2f8d68c
core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox/MicroscopyDropbox.py
core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox/MicroscopyDropbox.py
# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Disabling HDF5 caching
    HDF5Container.disableCaching()

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()
# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()
Remove temporary HDF5 cache workaround.
Remove temporary HDF5 cache workaround.
Python
apache-2.0
aarpon/obit_microscopy_core_technology,aarpon/obit_microscopy_core_technology,aarpon/obit_microscopy_core_technology
# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Disabling HDF5 caching
    HDF5Container.disableCaching()

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()

Remove temporary HDF5 cache workaround.
# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()
<commit_before># -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Disabling HDF5 caching
    HDF5Container.disableCaching()

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()

<commit_msg>Remove temporary HDF5 cache workaround.<commit_after>
# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()
# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Disabling HDF5 caching
    HDF5Container.disableCaching()

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()

Remove temporary HDF5 cache workaround.# -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()
<commit_before># -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Disabling HDF5 caching
    HDF5Container.disableCaching()

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()

<commit_msg>Remove temporary HDF5 cache workaround.<commit_after># -*- coding: utf-8 -*-

"""

@author: Aaron Ponti

"""

import os
import logging
import re
from ch.systemsx.cisd.openbis.common.hdf5 import HDF5Container
from Processor import Processor


def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """

    # Get path to containing folder
    # __file__ does not work (reliably) in Jython
    dbPath = "../core-plugins/microscopy/1/dss/drop-boxes/MicroscopyDropbox"

    # Path to the logs subfolder
    logPath = os.path.join(dbPath, "logs")

    # Make sure the logs subfolder exists
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    # Path for the log file
    logFile = os.path.join(logPath, "registration_log.txt")

    # Set up logging
    logger = logging.getLogger('MicroscopyDropbox')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logFile)
    fh.setLevel(logging.DEBUG)
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(format)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # Create a Processor
    processor = Processor(transaction, logger)

    # Run
    processor.run()
54a345eb96bce8c3035b402ce009b1e3fda46a42
quran_text/serializers.py
quran_text/serializers.py
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): class Meta: model = Ayah fields = ['sura', 'number', 'text']
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): sura_id = serializers.IntegerField(source='sura.pk') sura_name = serializers.CharField(source='sura.name') ayah_number = serializers.IntegerField(source='number') class Meta: model = Ayah fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
Change label and add Sura name to Ayah Serializer
Change label and add Sura name to Ayah Serializer
Python
mit
EmadMokhtar/tafseer_api
from rest_framework import serializers

from .models import Sura, Ayah


class SuraSerializer(serializers.ModelSerializer):

    class Meta:
        model = Sura
        fields = ['index', 'name']


class AyahSerializer(serializers.ModelSerializer):

    class Meta:
        model = Ayah
        fields = ['sura', 'number', 'text']

Change label and add Sura name to Ayah Serializer
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): sura_id = serializers.IntegerField(source='sura.pk') sura_name = serializers.CharField(source='sura.name') ayah_number = serializers.IntegerField(source='number') class Meta: model = Ayah fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
<commit_before>from rest_framework import serializers

from .models import Sura, Ayah


class SuraSerializer(serializers.ModelSerializer):

    class Meta:
        model = Sura
        fields = ['index', 'name']


class AyahSerializer(serializers.ModelSerializer):

    class Meta:
        model = Ayah
        fields = ['sura', 'number', 'text']

<commit_msg>Change label and add Sura name to Ayah Serializer<commit_after>
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): sura_id = serializers.IntegerField(source='sura.pk') sura_name = serializers.CharField(source='sura.name') ayah_number = serializers.IntegerField(source='number') class Meta: model = Ayah fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
from rest_framework import serializers

from .models import Sura, Ayah


class SuraSerializer(serializers.ModelSerializer):

    class Meta:
        model = Sura
        fields = ['index', 'name']


class AyahSerializer(serializers.ModelSerializer):

    class Meta:
        model = Ayah
        fields = ['sura', 'number', 'text']

Change label and add Sura name to Ayah Serializerfrom rest_framework import serializers

from .models import Sura, Ayah


class SuraSerializer(serializers.ModelSerializer):

    class Meta:
        model = Sura
        fields = ['index', 'name']


class AyahSerializer(serializers.ModelSerializer):
    sura_id = serializers.IntegerField(source='sura.pk')
    sura_name = serializers.CharField(source='sura.name')
    ayah_number = serializers.IntegerField(source='number')

    class Meta:
        model = Ayah
        fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
<commit_before>from rest_framework import serializers

from .models import Sura, Ayah


class SuraSerializer(serializers.ModelSerializer):

    class Meta:
        model = Sura
        fields = ['index', 'name']


class AyahSerializer(serializers.ModelSerializer):

    class Meta:
        model = Ayah
        fields = ['sura', 'number', 'text']

<commit_msg>Change label and add Sura name to Ayah Serializer<commit_after>from rest_framework import serializers

from .models import Sura, Ayah


class SuraSerializer(serializers.ModelSerializer):

    class Meta:
        model = Sura
        fields = ['index', 'name']


class AyahSerializer(serializers.ModelSerializer):
    sura_id = serializers.IntegerField(source='sura.pk')
    sura_name = serializers.CharField(source='sura.name')
    ayah_number = serializers.IntegerField(source='number')

    class Meta:
        model = Ayah
        fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
bb7741ade270458564ea7546d372e39bbbe0f97d
rds/delete_db_instance.py
rds/delete_db_instance.py
#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # create an rds client rds = boto3.client('rds') # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error
#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] # create an rds client rds = boto3.client('rds') try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error
Swap db and rds set up
Swap db and rds set up
Python
mit
managedkaos/AWS-Python-Boto3
#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # create an rds client rds = boto3.client('rds') # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error Swap db and rds set up
#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] # create an rds client rds = boto3.client('rds') try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error
<commit_before>#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # create an rds client rds = boto3.client('rds') # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error <commit_msg>Swap db and rds set up<commit_after>
#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] # create an rds client rds = boto3.client('rds') try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error
#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # create an rds client rds = boto3.client('rds') # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error Swap db and rds set up#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] # create an rds client rds = boto3.client('rds') try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error
<commit_before>#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # create an rds client rds = boto3.client('rds') # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error <commit_msg>Swap db and rds set up<commit_after>#!/usr/bin/env python # a script to delete an rds instance # import the sys and boto3 libraries import sys import boto3 # use the first argument to the script as the name # of the instance to be deleted db = sys.argv[1] # create an rds client rds = boto3.client('rds') try: # delete the instance and catch the response response = rds.delete_db_instance( DBInstanceIdentifier=db, SkipFinalSnapshot=True) # print the response if there are no exceptions print response # if there is an exception, print the error message except Exception as error: print error
f5d091389a871195fc59a2d003b561a7942e8cce
matl_online/extensions.py
matl_online/extensions.py
"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CsrfProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CsrfProtect()
"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CSRFProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CSRFProtect()
Replace deprecated CsrfProtect with CSRFProtect
Replace deprecated CsrfProtect with CSRFProtect
Python
mit
suever/MATL-Online,suever/MATL-Online,suever/MATL-Online,suever/MATL-Online,suever/MATL-Online
"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CsrfProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CsrfProtect() Replace deprecated CsrfProtect with CSRFProtect
"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CSRFProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CSRFProtect()
<commit_before>"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CsrfProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CsrfProtect() <commit_msg>Replace deprecated CsrfProtect with CSRFProtect<commit_after>
"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CSRFProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CSRFProtect()
"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CsrfProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CsrfProtect() Replace deprecated CsrfProtect with CSRFProtect"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CSRFProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CSRFProtect()
<commit_before>"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CsrfProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CsrfProtect() <commit_msg>Replace deprecated CsrfProtect with CSRFProtect<commit_after>"""Enable third party extensions.""" from flask_migrate import Migrate from flask_sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask_wtf import CSRFProtect from celery import Celery from matl_online.settings import Config # Change the timeout for celery process initialization from celery.concurrency import asynpool asynpool.PROC_ALIVE_TIMEOUT = Config.CELERY_PROCESS_INIT_TIMEOUT db = SQLAlchemy() migrate = Migrate() socketio = SocketIO() celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) csrf = CSRFProtect()
ce29f011a72bf695c9b0840ad4c121f85c9fcad1
mica/stats/tests/test_guide_stats.py
mica/stats/tests/test_guide_stats.py
import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn)
import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_calc_stats_with_bright_trans(): s = guide_stats.calc_stats(17472) # Assert that the std on the slot 7 residuals are reasonable # even in this obsid that had a transition to BRIT assert s[1][7]['dr_std'] < 1 def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn)
Add test to confirm more reasonable residual std on one obsid/slot
Add test to confirm more reasonable residual std on one obsid/slot
Python
bsd-3-clause
sot/mica,sot/mica
import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn) Add test to confirm more reasonable residual std on one obsid/slot
import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_calc_stats_with_bright_trans(): s = guide_stats.calc_stats(17472) # Assert that the std on the slot 7 residuals are reasonable # even in this obsid that had a transition to BRIT assert s[1][7]['dr_std'] < 1 def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn)
<commit_before>import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn) <commit_msg>Add test to confirm more reasonable residual std on one obsid/slot<commit_after>
import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_calc_stats_with_bright_trans(): s = guide_stats.calc_stats(17472) # Assert that the std on the slot 7 residuals are reasonable # even in this obsid that had a transition to BRIT assert s[1][7]['dr_std'] < 1 def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn)
import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn) Add test to confirm more reasonable residual std on one obsid/slotimport tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_calc_stats_with_bright_trans(): s = guide_stats.calc_stats(17472) # Assert that the std on the slot 7 residuals are reasonable # even in this obsid that had a transition to BRIT assert s[1][7]['dr_std'] < 1 def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn)
<commit_before>import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn) <commit_msg>Add test to confirm more reasonable residual std on one obsid/slot<commit_after>import tempfile import os from .. import guide_stats def test_calc_stats(): guide_stats.calc_stats(17210) def test_calc_stats_with_bright_trans(): s = guide_stats.calc_stats(17472) # Assert that the std on the slot 7 residuals are reasonable # even in this obsid that had a transition to BRIT assert s[1][7]['dr_std'] < 1 def test_make_gui_stats(): """ Save the guide stats for one obsid into a newly-created table """ # Get a temporary file, but then delete it, because _save_acq_stats will only # make a new table if the supplied file doesn't exist fh, fn = tempfile.mkstemp(suffix='.h5') os.unlink(fn) guide_stats.TABLE_FILE = fn obsid = 20001 obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid) t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp) guide_stats._save_gui_stats(t) os.unlink(fn)
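The new assertion in the row above bounds a statistical quantity (a residual standard deviation under 1) instead of pinning an exact float — the usual shape for numeric regression tests. A generic, self-contained sketch of that pattern (data and threshold here are made up):

import statistics
import unittest

class TestResidualSpread(unittest.TestCase):
    def test_residual_std_is_bounded(self):
        residuals = [0.12, -0.08, 0.05, -0.11, 0.02]  # stand-in data
        # Bound the spread instead of asserting exact floats, which would
        # break on any harmless numeric change upstream.
        self.assertLess(statistics.stdev(residuals), 1.0)

if __name__ == '__main__':
    unittest.main()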
4d1ab55f2bbe8041421002a91dc4f58783913591
services/search_indexes.py
services/search_indexes.py
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, ))
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
Implement language-specific aspects of indexing
Implement language-specific aspects of indexing
Python
bsd-3-clause
theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, )) Implement language-specific aspects of indexing
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
<commit_before>from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, )) <commit_msg>Implement language-specific aspects of indexing<commit_after>
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, )) Implement language-specific aspects of indexingfrom aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
<commit_before>from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): # XXX what about language? concatenate all available languages? return obj.name_en def get_index_queryset(self, language): # XXX exclude objects with blank name for the selected language, not simply for EN return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(name_en='') def get_model(self): return Service def get_search_data(self, service, language, request): # XXX return data for the selected language, not simply for EN return ' '.join(( service.provider.name_en, service.name_en, service.area_of_service.name_en, service.description_en, service.additional_info_en, service.type.name_en, )) <commit_msg>Implement language-specific aspects of indexing<commit_after>from aldryn_search.utils import get_index_base from .models import Service class ServiceIndex(get_index_base()): haystack_use_for_indexing = True index_title = True def get_title(self, obj): return obj.name def get_index_queryset(self, language): # For this language's index, don't include services with no name # provided in this language. return Service.objects.filter(status=Service.STATUS_CURRENT).exclude(**{ 'name_%s' % language: '' }) def get_model(self): return Service def get_search_data(self, service, language, request): description = getattr(service, 'description_%s' % language, '') additional_info = getattr(service, 'additional_info_%s' % language, '') return ' '.join(( service.provider.name, service.name, service.area_of_service.name, description, additional_info, service.type.name, ))
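The core move in the row above is composing field names at runtime — 'name_%s' % language — and reading them with getattr, the usual way to reach the per-language columns that a translation layer adds to a model. A self-contained sketch of just that lookup (a plain class stands in for the Django model; field values are invented):

class Service:
    def __init__(self, **fields):
        self.__dict__.update(fields)

def search_text(service, language):
    # Compose the per-language attribute name, defaulting to '' when the
    # translation is missing, as the index code above does.
    name = getattr(service, 'name_%s' % language, '')
    description = getattr(service, 'description_%s' % language, '')
    return ' '.join(part for part in (name, description) if part)

svc = Service(name_en='Health clinic', name_ar='عيادة صحية', description_en='Primary care')
print(search_text(svc, 'en'))  # -> 'Health clinic Primary care'
print(search_text(svc, 'ar'))  # -> 'عيادة صحية'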
92f7ca684ed3e9113ccc709442a02b2c14fe662e
opengrid/tests/test_plotting.py
opengrid/tests/test_plotting.py
# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import unittest class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main()
# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import matplotlib import unittest matplotlib.use('Agg') class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main()
Resolve RuntimeError: Invalid DISPLAY variable
[TST] Resolve RuntimeError: Invalid DISPLAY variable
Python
apache-2.0
opengridcc/opengrid
# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import unittest class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main() [TST] Resolve RuntimeError: Invalid DISPLAY variable
# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import matplotlib import unittest matplotlib.use('Agg') class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main()
<commit_before># -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import unittest class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main() <commit_msg>[TST] Resolve RuntimeError: Invalid DISPLAY variable<commit_after>
# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import matplotlib import unittest matplotlib.use('Agg') class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main()
# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import unittest class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main() [TST] Resolve RuntimeError: Invalid DISPLAY variable# -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import matplotlib import unittest matplotlib.use('Agg') class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main()
<commit_before># -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import unittest class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main() <commit_msg>[TST] Resolve RuntimeError: Invalid DISPLAY variable<commit_after># -*- coding: utf-8 -*- """ Created on Mon Dec 30 02:37:25 2013 @author: Jan """ import matplotlib import unittest matplotlib.use('Agg') class PlotStyleTest(unittest.TestCase): def test_default(self): from opengrid.library.plotting import plot_style plt = plot_style() class CarpetTest(unittest.TestCase): def test_default(self): import numpy as np import pandas as pd from opengrid.library import plotting index = pd.date_range('2015-1-1', '2015-12-31', freq='h') ser = pd.Series(np.random.normal(size=len(index)), index=index, name='abc') plotting.carpet(ser) if __name__ == '__main__': unittest.main()
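The fix in the row above leans on matplotlib's rule that a backend should be selected before pyplot is first imported; on a machine with no display server the default interactive backend raises RuntimeError: Invalid DISPLAY variable. A minimal headless-safe sketch, assuming matplotlib is installed:

import matplotlib
# Pick the file-only Agg backend before pyplot is imported anywhere in
# the process, so no attempt is ever made to open a display.
matplotlib.use('Agg')

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig('smoke.png')  # renders straight to a file; no X server needed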
85999e2024027e45015fb2f2417867a6d9f324c7
EditorConfig.py
EditorConfig.py
import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): try: config = get_properties(view.file_name()) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() # EOL view.set_line_endings(LINE_ENDINGS[config['end_of_line']]) # Indent type settings.set('translate_tabs_to_spaces', config['indent_style'] == 'space') # Indent size settings.set('tab_size', int(config['indent_size'])) else: print 'There seems to be an error with your .editorconfig file'
import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): path = view.file_name() if not path: return try: config = get_properties(path) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() window = view.window() end_of_line = config.get('end_of_line') indent_style = config.get('indent_style') indent_size = config.get('indent_size') # Indent type if indent_style == 'tab': window.run_command('unexpand_tabs', {'set_translate_tabs': False}) if indent_style == 'space': window.run_command('expand_tabs', {'set_translate_tabs': True}) # Indent size if indent_size: settings.set('tab_size', int(indent_size)) # EOL if end_of_line: view.set_line_endings(LINE_ENDINGS[end_of_line])
Fix plugin not taking into account opening of unsaved buffers and some refactoring
Fix plugin not taking into account opening of unsaved buffers and some refactoring
Python
mit
rivy/editorconfig-sublime,gatero/editorconfig-sublime,sindresorhus/editorconfig-sublime,SerkanSipahi/editorconfig-sublime
import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): try: config = get_properties(view.file_name()) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() # EOL view.set_line_endings(LINE_ENDINGS[config['end_of_line']]) # Indent type settings.set('translate_tabs_to_spaces', config['indent_style'] == 'space') # Indent size settings.set('tab_size', int(config['indent_size'])) else: print 'There seems to be an error with your .editorconfig file'Fix plugin not taking into account opening of unsaved buffers and some refactoring
import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): path = view.file_name() if not path: return try: config = get_properties(path) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() window = view.window() end_of_line = config.get('end_of_line') indent_style = config.get('indent_style') indent_size = config.get('indent_size') # Indent type if indent_style == 'tab': window.run_command('unexpand_tabs', {'set_translate_tabs': False}) if indent_style == 'space': window.run_command('expand_tabs', {'set_translate_tabs': True}) # Indent size if indent_size: settings.set('tab_size', int(indent_size)) # EOL if end_of_line: view.set_line_endings(LINE_ENDINGS[end_of_line])
<commit_before>import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): try: config = get_properties(view.file_name()) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() # EOL view.set_line_endings(LINE_ENDINGS[config['end_of_line']]) # Indent type settings.set('translate_tabs_to_spaces', config['indent_style'] == 'space') # Indent size settings.set('tab_size', int(config['indent_size'])) else: print 'There seems to be an error with your .editorconfig file'<commit_msg>Fix plugin not taking into account opening of unsaved buffers and some refactoring<commit_after>
import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): path = view.file_name() if not path: return try: config = get_properties(path) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() window = view.window() end_of_line = config.get('end_of_line') indent_style = config.get('indent_style') indent_size = config.get('indent_size') # Indent type if indent_style == 'tab': window.run_command('unexpand_tabs', {'set_translate_tabs': False}) if indent_style == 'space': window.run_command('expand_tabs', {'set_translate_tabs': True}) # Indent size if indent_size: settings.set('tab_size', int(indent_size)) # EOL if end_of_line: view.set_line_endings(LINE_ENDINGS[end_of_line])
import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): try: config = get_properties(view.file_name()) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() # EOL view.set_line_endings(LINE_ENDINGS[config['end_of_line']]) # Indent type settings.set('translate_tabs_to_spaces', config['indent_style'] == 'space') # Indent size settings.set('tab_size', int(config['indent_size'])) else: print 'There seems to be an error with your .editorconfig file'Fix plugin not taking into account opening of unsaved buffers and some refactoringimport sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): path = view.file_name() if not path: return try: config = get_properties(path) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() window = view.window() end_of_line = config.get('end_of_line') indent_style = config.get('indent_style') indent_size = config.get('indent_size') # Indent type if indent_style == 'tab': window.run_command('unexpand_tabs', {'set_translate_tabs': False}) if indent_style == 'space': window.run_command('expand_tabs', {'set_translate_tabs': True}) # Indent size if indent_size: settings.set('tab_size', int(indent_size)) # EOL if end_of_line: view.set_line_endings(LINE_ENDINGS[end_of_line])
<commit_before>import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): try: config = get_properties(view.file_name()) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() # EOL view.set_line_endings(LINE_ENDINGS[config['end_of_line']]) # Indent type settings.set('translate_tabs_to_spaces', config['indent_style'] == 'space') # Indent size settings.set('tab_size', int(config['indent_size'])) else: print 'There seems to be an error with your .editorconfig file'<commit_msg>Fix plugin not taking into account opening of unsaved buffers and some refactoring<commit_after>import sublime_plugin from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'Unix', 'crlf': 'Windows', 'cr': 'CR' } class EditorConfig(sublime_plugin.EventListener): def on_load(self, view): path = view.file_name() if not path: return try: config = get_properties(path) except EditorConfigError: print 'Error occurred while getting EditorConfig properties' else: if config: settings = view.settings() window = view.window() end_of_line = config.get('end_of_line') indent_style = config.get('indent_style') indent_size = config.get('indent_size') # Indent type if indent_style == 'tab': window.run_command('unexpand_tabs', {'set_translate_tabs': False}) if indent_style == 'space': window.run_command('expand_tabs', {'set_translate_tabs': True}) # Indent size if indent_size: settings.set('tab_size', int(indent_size)) # EOL if end_of_line: view.set_line_endings(LINE_ENDINGS[end_of_line])
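Two defensive habits in the row above travel well: return early when an identifier can legitimately be absent (Sublime's view.file_name() is None for unsaved buffers), and read optional keys with .get instead of indexing so missing .editorconfig properties don't raise. A standalone sketch of both (the dict mimics a parsed config; names are illustrative):

def apply_config(path, config):
    if not path:  # unsaved buffer: nothing on disk to match against
        return None
    # .get tolerates properties the .editorconfig file does not define.
    indent_style = config.get('indent_style')
    indent_size = config.get('indent_size')
    settings = {}
    if indent_style:
        settings['translate_tabs_to_spaces'] = (indent_style == 'space')
    if indent_size:
        settings['tab_size'] = int(indent_size)
    return settings

print(apply_config('a.py', {'indent_style': 'space', 'indent_size': '4'}))
print(apply_config(None, {'indent_style': 'tab'}))  # -> None, guarded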
59945afd33c918b6aa095790263991aaccf86157
osgtest/tests/test_85_lcmaps.py
osgtest/tests/test_85_lcmaps.py
import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class RestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps')
import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class TestRestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps')
Standardize lcmaps cleanup class name
Standardize lcmaps cleanup class name git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@21921 4e558342-562e-0410-864c-e07659590f8c
Python
apache-2.0
efajardo/osg-test,efajardo/osg-test
import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class RestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps') Standardize lcmaps cleanup class name git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@21921 4e558342-562e-0410-864c-e07659590f8c
import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class TestRestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps')
<commit_before>import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class RestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps') <commit_msg>Standardize lcmaps cleanup class name git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@21921 4e558342-562e-0410-864c-e07659590f8c<commit_after>
import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class TestRestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps')
import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class RestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps') Standardize lcmaps cleanup class name git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@21921 4e558342-562e-0410-864c-e07659590f8cimport unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class TestRestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps')
<commit_before>import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class RestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps') <commit_msg>Standardize lcmaps cleanup class name git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@21921 4e558342-562e-0410-864c-e07659590f8c<commit_after>import unittest import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest class TestRestoreLcMaps(osgunittest.OSGTestCase): def test_01_restore_lcmaps_after_glexec(self): core.skip_ok_unless_installed('glexec') files.restore('/etc/lcmaps.db', 'lcmaps')
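The rename in the row above (RestoreLcMaps → TestRestoreLcMaps) lines the class up with the Test* naming that many collectors key on — pytest's default rules, for instance, only pick up classes whose names start with Test, while plain unittest goes by TestCase inheritance. A tiny sketch of the convention (class and method names are illustrative):

import unittest

class TestRestoreConfig(unittest.TestCase):  # 'Test' prefix: pytest collects it too
    def test_restore_runs(self):             # 'test_' prefix marks the test method
        self.assertTrue(True)

class RestoreHelpers(object):                # no prefix: ignored by pytest collection
    pass

if __name__ == '__main__':
    unittest.main()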
433167fb55378a73f4c0a808728f572de9d03e62
swimlane/core/fields/valueslist.py
swimlane/core/fields/valueslist.py
import six from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValueError('Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) )) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value }
import six from swimlane.exceptions import ValidationError from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) ) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value }
Convert ValueError -> ValidationError in ValuesListField
Convert ValueError -> ValidationError in ValuesListField
Python
mit
Swimlane/sw-python-client
import six from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValueError('Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) )) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value } Convert ValueError -> ValidationError in ValuesListField
import six from swimlane.exceptions import ValidationError from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) ) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value }
<commit_before>import six from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValueError('Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) )) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value } <commit_msg>Convert ValueError -> ValidationError in ValuesListField<commit_after>
import six from swimlane.exceptions import ValidationError from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) ) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value }
import six from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValueError('Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) )) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value } Convert ValueError -> ValidationError in ValuesListFieldimport six from swimlane.exceptions import ValidationError from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) ) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value }
<commit_before>import six from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValueError('Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) )) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value } <commit_msg>Convert ValueError -> ValidationError in ValuesListField<commit_after>import six from swimlane.exceptions import ValidationError from .base import MultiSelectField class ValuesListField(MultiSelectField): field_type = 'Core.Models.Fields.ValuesListField, Core' supported_types = six.string_types def __init__(self, *args, **kwargs): """Map names to IDs for use in field rehydration""" super(ValuesListField, self).__init__(*args, **kwargs) self.selection_to_id_map = {f['name']: f['id'] for f in self.field_definition['values']} def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) ) def cast_to_python(self, value): """Store actual value as internal representation""" if value is not None: value = value['value'] return value def cast_to_swimlane(self, value): """Rehydrate value back as full JSON representation""" if value is None: return value return { '$type': 'Core.Models.Record.ValueSelection, Core', 'id': self.selection_to_id_map[value], 'value': value }
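The swap in the row above replaces a bare ValueError with a ValidationError that carries the offending record, so a caller can catch one app-specific type and still reach the context. A self-contained sketch of that exception shape (swimlane's real class may differ; this mirrors only the call pattern visible above):

class ValidationError(ValueError):
    """Domain error that keeps a handle on the object that failed."""
    def __init__(self, record, message):
        super(ValidationError, self).__init__(message)
        self.record = record

def validate_choice(record, field_name, value, options):
    if value not in options:
        raise ValidationError(record, 'Field "{}" invalid value "{}". Valid options: {}'.format(
            field_name, value, ', '.join(options)))

try:
    validate_choice({'id': 'rec-1'}, 'status', 'bogus', ['open', 'closed'])
except ValidationError as exc:
    print(exc, '| failing record:', exc.record)  # context survives to the handler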
ea391280843f946200657635df5934d644306210
smarty/tests/test_utils.py
smarty/tests/test_utils.py
from functools import partial from smarty.utils import get_data_filename from unittest import TestCase import smarty class TestUtils(TestCase): def test_read_molecules(self): molecules = smarty.utils.read_molecules(get_data_filename('molecules/zinc-subset-tripos.mol2.gz'), verbose=False)
Add simple test for utils
Add simple test for utils
Python
mit
openforcefield/openff-toolkit,open-forcefield-group/openforcefield,openforcefield/openff-toolkit,open-forcefield-group/openforcefield,open-forcefield-group/openforcefield
Add simple test for utils
from functools import partial from smarty.utils import get_data_filename from unittest import TestCase import smarty class TestUtils(TestCase): def test_read_molecules(self): molecules = smarty.utils.read_molecules(get_data_filename('molecules/zinc-subset-tripos.mol2.gz'), verbose=False)
<commit_before><commit_msg>Add simple test for utils<commit_after>
from functools import partial from smarty.utils import get_data_filename from unittest import TestCase import smarty class TestUtils(TestCase): def test_read_molecules(self): molecules = smarty.utils.read_molecules(get_data_filename('molecules/zinc-subset-tripos.mol2.gz'), verbose=False)
Add simple test for utilsfrom functools import partial from smarty.utils import get_data_filename from unittest import TestCase import smarty class TestUtils(TestCase): def test_read_molecules(self): molecules = smarty.utils.read_molecules(get_data_filename('molecules/zinc-subset-tripos.mol2.gz'), verbose=False)
<commit_before><commit_msg>Add simple test for utils<commit_after>from functools import partial from smarty.utils import get_data_filename from unittest import TestCase import smarty class TestUtils(TestCase): def test_read_molecules(self): molecules = smarty.utils.read_molecules(get_data_filename('molecules/zinc-subset-tripos.mol2.gz'), verbose=False)
8d70645168ea4962359d67b00926f29544f4c506
organizations/managers.py
organizations/managers.py
from django.db import models class OrgManager(models.Manager): def get_for_user(self, user): return self.get_query_set().filter(users=user) class ActiveOrgManager(OrgManager): """ A more useful extension of the default manager which returns querysets including only active organizations """ def get_query_set(self): return super(ActiveOrgManager, self).get_query_set().filter(is_active=True)
from django.db import models class OrgManager(models.Manager): def get_for_user(self, user): if hasattr(self, 'get_queryset'): return self.get_queryset().filter(users=user) else: # Deprecated method for older versions of Django return self.get_query_set().filter(users=user) class ActiveOrgManager(OrgManager): """ A more useful extension of the default manager which returns querysets including only active organizations """ def get_queryset(self): try: return super(ActiveOrgManager, self).get_queryset().filter(is_active=True) except AttributeError: # Deprecated method for older versions of Django. return super(ActiveOrgManager, self).get_query_set().filter(is_active=True) get_query_set = get_queryset
Use get_queryset method by default
Use get_queryset method by default Adds handler for get_query_set where the former method is not available in the base manager class. Closes gh-48
Python
bsd-2-clause
GauthamGoli/django-organizations,DESHRAJ/django-organizations,bennylope/django-organizations,st8st8/django-organizations,GauthamGoli/django-organizations,DESHRAJ/django-organizations,bennylope/django-organizations,st8st8/django-organizations
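A minimal sketch of the compatibility idiom in this commit, shown without Django so it runs standalone; both class names are hypothetical stand-ins, not real Django managers:

class OldStyleManager(object):
    # Stands in for a manager that only has the pre-Django-1.6 spelling.
    def get_query_set(self):
        return ['org-a', 'org-b']

class NewStyleManager(object):
    # Stands in for a manager that has the modern spelling.
    def get_queryset(self):
        return ['org-a', 'org-b']

def fetch_for_user(manager):
    # Prefer get_queryset(); fall back to the deprecated get_query_set().
    if hasattr(manager, 'get_queryset'):
        return manager.get_queryset()
    return manager.get_query_set()

assert fetch_for_user(OldStyleManager()) == fetch_for_user(NewStyleManager())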
0c495fa6254800f1b7a4dbc66424a24c1d539280
cobs3/__init__.py
cobs3/__init__.py
""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from cobs._cobsext import * _using_extension = True except ImportError: from cobs._cobspy import * _using_extension = False
""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from ._cobsext import * _using_extension = True except ImportError: from ._cobspy import * _using_extension = False
Change Python 3.x imports to relative.
Change Python 3.x imports to relative.
Python
mit
cmcqueen/cobs-python,cmcqueen/cobs-python
""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from cobs._cobsext import * _using_extension = True except ImportError: from cobs._cobspy import * _using_extension = False Change Python 3.x imports to relative.
""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from ._cobsext import * _using_extension = True except ImportError: from ._cobspy import * _using_extension = False
<commit_before>""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from cobs._cobsext import * _using_extension = True except ImportError: from cobs._cobspy import * _using_extension = False <commit_msg>Change Python 3.x imports to relative.<commit_after>
""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from ._cobsext import * _using_extension = True except ImportError: from ._cobspy import * _using_extension = False
""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from cobs._cobsext import * _using_extension = True except ImportError: from cobs._cobspy import * _using_extension = False Change Python 3.x imports to relative.""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from ._cobsext import * _using_extension = True except ImportError: from ._cobspy import * _using_extension = False
<commit_before>""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from cobs._cobsext import * _using_extension = True except ImportError: from cobs._cobspy import * _using_extension = False <commit_msg>Change Python 3.x imports to relative.<commit_after>""" Consistent Overhead Byte Stuffing (COBS) encoding and decoding. Functions are provided for encoding and decoding according to the basic COBS method. The COBS variant "Zero Pair Elimination" (ZPE) is not implemented. A pure Python implementation and a C extension implementation are provided. If the C extension is not available for some reason, the pure Python version will be used. References: http://www.stuartcheshire.org/papers/COBSforSIGCOMM/ http://www.stuartcheshire.org/papers/COBSforToN.pdf """ try: from ._cobsext import * _using_extension = True except ImportError: from ._cobspy import * _using_extension = False
fbc757281aa6f0bdbba57fb21c89553a7274f58d
billjobs/tests/tests_model.py
billjobs/tests/tests_model.py
from django.test import TestCase, Client
from django.contrib.auth.models import User
from billjobs.models import Bill, Service
from billjobs.settings import BILLJOBS_BILL_ISSUER


class BillingTestCase(TestCase):
    ''' Test billing creation and modification '''
    fixtures = ['dev_data.json']

    def setUp(self):
        self.client = Client()
        self.client.login(username='bill', password='jobs')

    def tearDown(self):
        self.client.logout()

    def test_create_bill_with_one_line(self):
        ''' Test when user is created a bill with a single service '''
        #response = self.client.get('/admin/billjobs/bill/add/', follow_redirect=True)
        #self.assertEqual(response.status_code, 200)
        self.assertTrue(True)

    def test_create_bill(self):
        user = User.objects.get(username='bill')
        bill = Bill(user=user)
        self.assertEqual(bill.user.username, 'bill')
        self.assertEqual(bill.issuer_address, BILLJOBS_BILL_ISSUER)
        self.assertEqual(bill.billing_address, user.billing_address)
from django.test import TestCase, Client
from django.contrib.auth.models import User
from billjobs.models import Bill, Service
from billjobs.settings import BILLJOBS_BILL_ISSUER


class BillingTestCase(TestCase):
    ''' Test billing creation and modification '''
    fixtures = ['dev_data.json']

    def setUp(self):
        self.client = Client()
        self.client.login(username='bill', password='jobs')

    def tearDown(self):
        self.client.logout()

    def test_create_bill_with_one_line(self):
        ''' Test when user is created a bill with a single service '''
        #response = self.client.get('/admin/billjobs/bill/add/', follow_redirect=True)
        #self.assertEqual(response.status_code, 200)
        self.assertTrue(True)

    def test_create_bill(self):
        user = User.objects.get(username='bill')
        bill = Bill(user=user)
        bill.save()
        self.assertEqual(bill.user.username, 'bill')
        self.assertEqual(bill.issuer_address, BILLJOBS_BILL_ISSUER)
        self.assertEqual(
            bill.billing_address, user.userprofile.billing_address)
Save bill fixtures to test recorded values
Save bill fixtures to test recorded values
Python
mit
ioO/billjobs
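A hedged illustration of why the test had to call save(): defaults filled in at save time are not visible on an unsaved instance. FakeBill is a hypothetical stand-in, not billjobs' real model logic:

class FakeBill(object):
    ISSUER = '123 Issuer Street'

    def __init__(self, user):
        self.user = user
        self.issuer_address = None

    def save(self):
        # Stand-in for model logic that populates fields at save time.
        if self.issuer_address is None:
            self.issuer_address = self.ISSUER

bill = FakeBill(user='bill')
assert bill.issuer_address is None      # unsaved: default not applied yet
bill.save()
assert bill.issuer_address == FakeBill.ISSUER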
a66040ec9c00b26cf2c8c8454d623a950fb63ea4
kitchen/dashboard/templatetags/filters.py
kitchen/dashboard/templatetags/filters.py
"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list} or [])
"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list or []})
Fix recipe filter return statement
Fix recipe filter return statement
Python
apache-2.0
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list} or []) Fix recipe filter return statement
"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list or []})
<commit_before>"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list} or []) <commit_msg>Fix recipe filter return statement<commit_after>
"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list or []})
"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list} or []) Fix recipe filter return statement"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list or []})
<commit_before>"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list} or []) <commit_msg>Fix recipe filter return statement<commit_after>"""Dashboard template filters""" from django import template import littlechef from kitchen.settings import REPO register = template.Library() @register.filter(name='get_role_list') def get_role_list(run_list): """Returns the role sublist from the given run_list""" if run_list: all_roles = littlechef.lib.get_roles_in_node( {'run_list': run_list}) role_list = [] for role in all_roles: if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']): # Only add if it doesn't start with excluded role prefixes role_list.append(role) return role_list else: return [] @register.filter(name='get_recipe_list') def get_recipe_list(run_list): """Returns the recipe sublist from the given run_list""" return littlechef.lib.get_recipes_in_node({'run_list': run_list or []})
7630ca3a1ced1a29f428efd2e60ce02a7a6c5869
bin/update/deploy_dev_base.py
bin/update/deploy_dev_base.py
import logging

from commander.deploy import task

from deploy_base import *  # noqa

log = logging.getLogger(__name__)

base_update_assets = update_assets
base_database = database


@task
def database(ctx):
    # only ever run this one on demo and dev.
    management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure')
    base_database()
    management_cmd(ctx, 'rnasync')
    management_cmd(ctx, 'update_security_advisories --force --quiet',
                   use_src_dir=True)
    management_cmd(ctx, 'cron update_ical_feeds')
    management_cmd(ctx, 'cron update_tweets')
    management_cmd(ctx, 'runscript update_firefox_os_feeds')


@task
def update_assets(ctx):
    """Compile/compress static assets and fetch external data."""
    base_update_assets()
    # can't do this in `database` because it needs to run before
    # the file sync from SRC -> WWW.
    management_cmd(ctx, 'update_product_details', use_src_dir=True)
import logging

from commander.deploy import task

from deploy_base import *  # noqa

log = logging.getLogger(__name__)

base_update_assets = update_assets
base_database = database


@task
def database(ctx):
    # only ever run this one on demo and dev.
    base_database()
    management_cmd(ctx, 'rnasync')
    management_cmd(ctx, 'update_security_advisories --quiet',
                   use_src_dir=True)
    management_cmd(ctx, 'cron update_ical_feeds')
    management_cmd(ctx, 'cron update_tweets')
    management_cmd(ctx, 'runscript update_firefox_os_feeds')


@task
def update_assets(ctx):
    """Compile/compress static assets and fetch external data."""
    base_update_assets()
    # can't do this in `database` because it needs to run before
    # the file sync from SRC -> WWW.
    management_cmd(ctx, 'update_product_details', use_src_dir=True)
Stop truncating the DB for dev/demo pushes.
Stop truncating the DB for dev/demo pushes.
Python
mpl-2.0
alexgibson/bedrock,jpetto/bedrock,MichaelKohler/bedrock,CSCI-462-01-2017/bedrock,CSCI-462-01-2017/bedrock,hoosteeno/bedrock,l-hedgehog/bedrock,Sancus/bedrock,alexgibson/bedrock,gerv/bedrock,gauthierm/bedrock,analytics-pros/mozilla-bedrock,mermi/bedrock,jpetto/bedrock,pascalchevrel/bedrock,l-hedgehog/bedrock,hoosteeno/bedrock,TheJJ100100/bedrock,jpetto/bedrock,mkmelin/bedrock,ericawright/bedrock,mozilla/bedrock,jpetto/bedrock,CSCI-462-01-2017/bedrock,l-hedgehog/bedrock,gauthierm/bedrock,glogiotatidis/bedrock,gauthierm/bedrock,schalkneethling/bedrock,mermi/bedrock,kyoshino/bedrock,davehunt/bedrock,hoosteeno/bedrock,ericawright/bedrock,sgarrity/bedrock,kyoshino/bedrock,hoosteeno/bedrock,gerv/bedrock,craigcook/bedrock,flodolo/bedrock,craigcook/bedrock,pascalchevrel/bedrock,TheoChevalier/bedrock,flodolo/bedrock,pmclanahan/bedrock,analytics-pros/mozilla-bedrock,sylvestre/bedrock,pmclanahan/bedrock,TheoChevalier/bedrock,flodolo/bedrock,mkmelin/bedrock,davehunt/bedrock,mkmelin/bedrock,CSCI-462-01-2017/bedrock,mermi/bedrock,MichaelKohler/bedrock,davehunt/bedrock,gerv/bedrock,Sancus/bedrock,analytics-pros/mozilla-bedrock,pmclanahan/bedrock,pmclanahan/bedrock,ericawright/bedrock,glogiotatidis/bedrock,sylvestre/bedrock,schalkneethling/bedrock,glogiotatidis/bedrock,sgarrity/bedrock,TheJJ100100/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,sylvestre/bedrock,schalkneethling/bedrock,analytics-pros/mozilla-bedrock,gauthierm/bedrock,TheJJ100100/bedrock,alexgibson/bedrock,pascalchevrel/bedrock,TheoChevalier/bedrock,sgarrity/bedrock,TheJJ100100/bedrock,MichaelKohler/bedrock,TheoChevalier/bedrock,craigcook/bedrock,Sancus/bedrock,Sancus/bedrock,glogiotatidis/bedrock,schalkneethling/bedrock,alexgibson/bedrock,kyoshino/bedrock,jgmize/bedrock,mozilla/bedrock,l-hedgehog/bedrock,jgmize/bedrock,ericawright/bedrock,jgmize/bedrock,mozilla/bedrock,mermi/bedrock,craigcook/bedrock,kyoshino/bedrock,davehunt/bedrock,jgmize/bedrock,sgarrity/bedrock,gerv/bedrock,sylvestre/bedrock,mkmelin/bedrock,mozilla/bedrock,flodolo/bedrock
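This module uses a capture-then-shadow idiom (base_update_assets = update_assets before redefining update_assets). A minimal sketch of the pattern without the commander machinery:

def greet():
    return 'hello'

base_greet = greet        # keep a reference to the original before shadowing

def greet():              # redefine with an extended version that wraps the base
    return base_greet() + ', world'

assert greet() == 'hello, world'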
6dc47f932b5c7f84918ec730b3ccd03d74070453
app/py/cuda_sort/app_specific.py
app/py/cuda_sort/app_specific.py
import os

from cudatext import *


def get_ini_fn():
    return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini')


def ed_set_text_all(lines):
    ed.set_text_all('\n'.join(lines)+'\n')


def ed_get_text_all():
    n = ed.get_line_count()
    if ed.get_text_line(n-1)=='':
        n-=1
    return [ed.get_text_line(i) for i in range(n)]


def ed_insert_to_lines(lines, line1, line2):
    ed.delete(0, line1, 0, line2+1)
    ed.insert(0, line1, '\n'.join(lines)+'\n')
    ed.set_caret(0, line2+1, 0, line1)


def ed_set_tab_title(s):
    ed.set_prop(PROP_TAB_TITLE, s)


def ed_convert_tabs_to_spaces(s):
    return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s)


def msg_show_error(s):
    msg_box(s, MB_OK+MB_ICONERROR)


def ed_get_sel_lines():
    return ed.get_sel_lines()
import os

from cudatext import *


def get_ini_fn():
    return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini')


def ed_set_text_all(lines):
    ed.set_text_all('\n'.join(lines)+'\n')


def ed_get_text_all():
    n = ed.get_line_count()
    if ed.get_text_line(n-1)=='':
        n-=1
    return [ed.get_text_line(i) for i in range(n)]


def ed_insert_to_lines(lines, line1, line2):
    ed.delete(0, line1, 0, line2+1)
    ed.insert(0, line1, '\n'.join(lines)+'\n')
    ed.set_caret(0, line1+len(lines), 0, line1)


def ed_set_tab_title(s):
    ed.set_prop(PROP_TAB_TITLE, s)


def ed_convert_tabs_to_spaces(s):
    return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s)


def msg_show_error(s):
    msg_box(s, MB_OK+MB_ICONERROR)


def ed_get_sel_lines():
    return ed.get_sel_lines()
Sort plg: fix caret pos after 'delete empty lines'
Sort plg: fix caret pos after 'delete empty lines'
Python
mpl-2.0
Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText
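The arithmetic behind the caret fix, extracted into a standalone function so it can be checked without CudaText; the function name is chosen here for illustration:

def caret_after_replace(line1, line2, new_lines):
    # Old formula used (line2 + 1), which is wrong whenever
    # len(new_lines) != line2 - line1 + 1 (e.g. after deleting empty lines).
    # Fixed formula: the selection ends at line1 + len(new_lines).
    return line1 + len(new_lines), line1

# Four selected lines replaced by two: the caret must land at line 12, not 14.
assert caret_after_replace(10, 13, ['a', 'b']) == (12, 10)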
0a2baec53bba75a7940a3f1eaf7743c601b711f2
plugins/RemovableDriveOutputDevice/__init__.py
plugins/RemovableDriveOutputDevice/__init__.py
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.

import platform

from UM.i18n import i18nCatalog
catalog = i18nCatalog("uranium")


def getMetaData():
    return {
        "plugin": {
            "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"),
            "author": "Ultimaker B.V.",
            "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"),
            "version": "1.0",
            "api": 2
        }
    }


def register(app):
    if platform.system() == "Windows":
        from . import WindowsRemovableDrivePlugin
        return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() }
    elif platform.system() == "Darwin":
        from . import OSXRemovableDrivePlugin
        return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() }
    elif platform.system() == "Linux":
        from . import LinuxRemovableDrivePlugin
        return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() }
    else:
        Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system())
        return { }
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.

import platform

from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")


def getMetaData():
    return {
        "plugin": {
            "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"),
            "author": "Ultimaker B.V.",
            "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"),
            "version": "1.0",
            "api": 2
        }
    }


def register(app):
    if platform.system() == "Windows":
        from . import WindowsRemovableDrivePlugin
        return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() }
    elif platform.system() == "Darwin":
        from . import OSXRemovableDrivePlugin
        return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() }
    elif platform.system() == "Linux":
        from . import LinuxRemovableDrivePlugin
        return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() }
    else:
        Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system())
        return { }
Use the right catalog for RemovableDriveOutputDevice
Use the right catalog for RemovableDriveOutputDevice
Python
agpl-3.0
ad1217/Cura,markwal/Cura,bq/Ultimaker-Cura,bq/Ultimaker-Cura,ad1217/Cura,ynotstartups/Wanhao,fieldOfView/Cura,ynotstartups/Wanhao,totalretribution/Cura,hmflash/Cura,Curahelper/Cura,fxtentacle/Cura,senttech/Cura,fieldOfView/Cura,fxtentacle/Cura,totalretribution/Cura,markwal/Cura,hmflash/Cura,Curahelper/Cura,senttech/Cura
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("uranium") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { } Use the right catalog for RemovableDriveOutputDevice
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { }
<commit_before># Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("uranium") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { } <commit_msg>Use the right catalog for RemovableDriveOutputDevice<commit_after>
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { }
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("uranium") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { } Use the right catalog for RemovableDriveOutputDevice# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { }
<commit_before># Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("uranium") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { } <commit_msg>Use the right catalog for RemovableDriveOutputDevice<commit_after># Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. import platform from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Removable Drive Output Device Plugin"), "author": "Ultimaker B.V.", "description": catalog.i18nc("@info:whatsthis", "Provides removable drive hotplugging and writing support"), "version": "1.0", "api": 2 } } def register(app): if platform.system() == "Windows": from . import WindowsRemovableDrivePlugin return { "output_device": WindowsRemovableDrivePlugin.WindowsRemovableDrivePlugin() } elif platform.system() == "Darwin": from . import OSXRemovableDrivePlugin return { "output_device": OSXRemovableDrivePlugin.OSXRemovableDrivePlugin() } elif platform.system() == "Linux": from . import LinuxRemovableDrivePlugin return { "output_device": LinuxRemovableDrivePlugin.LinuxRemovableDrivePlugin() } else: Logger.log("e", "Unsupported system %s, no removable device hotplugging support available.", platform.system()) return { }
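Aside on the record above: the name passed to i18nCatalog picks which translation table strings are looked up in, which is why "uranium" vs "cura" matters. A minimal stand-in sketch of that behaviour — FakeCatalog and the TRANSLATIONS dict are invented for illustration and are not the real UM.i18n implementation:

# Illustrative stand-in, assuming gettext-style lookup keyed by catalog name.
TRANSLATIONS = {
    "cura": {"Removable Drive Output Device Plugin": "Plugin fuer Wechseldatentraeger"},
    "uranium": {},  # the plugin's strings are not shipped in this catalog
}


class FakeCatalog:
    def __init__(self, name):
        self._table = TRANSLATIONS.get(name, {})

    def i18nc(self, context, text):
        # Fall back to the untranslated source text on a lookup miss.
        return self._table.get(text, text)


print(FakeCatalog("uranium").i18nc("@label", "Removable Drive Output Device Plugin"))  # untranslated
print(FakeCatalog("cura").i18nc("@label", "Removable Drive Output Device Plugin"))     # translated

Looking strings up in the wrong catalog fails silently — the English fallback is returned — which is why the bug is easy to miss until translated builds are checked.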
ab68bce48b500d1ba9f7f5fa749f179ad78b5fdf
test/on_yubikey/cli_piv/test_misc.py
test/on_yubikey/cli_piv/test_misc.py
import unittest from ..framework import cli_test_suite from .util import DEFAULT_MANAGEMENT_KEY @cli_test_suite def additional_tests(ykman_cli): class Misc(unittest.TestCase): def setUp(self): ykman_cli('piv', 'reset', '-f') def test_info(self): output = ykman_cli('piv', 'info') self.assertIn('PIV version:', output) def test_reset(self): output = ykman_cli('piv', 'reset', '-f') self.assertIn('Success!', output) def test_write_read_object(self): data = 'test data' for i in range(0, 3): ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) data = ykman_cli('piv', 'read-object', '0x5f0001') self.assertEqual(data, 'test data') return [Misc]
import unittest from ..framework import cli_test_suite from .util import DEFAULT_MANAGEMENT_KEY @cli_test_suite def additional_tests(ykman_cli): class Misc(unittest.TestCase): def setUp(self): ykman_cli('piv', 'reset', '-f') def test_info(self): output = ykman_cli('piv', 'info') self.assertIn('PIV version:', output) def test_reset(self): output = ykman_cli('piv', 'reset', '-f') self.assertIn('Success!', output) def test_write_read_object(self): data = 'test data' for i in range(0, 3): ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) data = ykman_cli('piv', 'read-object', '0x5f0001') self.assertEqual(data, 'test data') def test_write_read_preserves_ansi_escapes(self): red = b'\x00\x1b[31m' blue = b'\x00\x1b[34m' reset = b'\x00\x1b[0m' data = (b'Hello, ' + red + b'red' + reset + b' and ' + blue + b'blue' + reset + b' world!') ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) output_data = ykman_cli.with_bytes_output( 'piv', 'read-object', '0x5f0001') self.assertEqual(data, output_data) return [Misc]
Test that piv read-object preserves ANSI escape codes
Test that piv read-object preserves ANSI escape codes

Objects written might (accidentally?) contain such codes, so they should
be preserved when read back out. For example, there's a 1 in 281×10^12
(256^6) chance that any six random bytes happen to make the escape code
for red text colour.
Python
bsd-2-clause
Yubico/yubikey-manager,Yubico/yubikey-manager
import unittest

from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY


@cli_test_suite
def additional_tests(ykman_cli):

    class Misc(unittest.TestCase):

        def setUp(self):
            ykman_cli('piv', 'reset', '-f')

        def test_info(self):
            output = ykman_cli('piv', 'info')
            self.assertIn('PIV version:', output)

        def test_reset(self):
            output = ykman_cli('piv', 'reset', '-f')
            self.assertIn('Success!', output)

        def test_write_read_object(self):
            data = 'test data'
            for i in range(0, 3):
                ykman_cli(
                    'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                    '0x5f0001', '-', input=data)
                data = ykman_cli('piv', 'read-object', '0x5f0001')

            self.assertEqual(data, 'test data')

    return [Misc]
Test that piv read-object preserves ANSI escape codes

Objects written might (accidentally?) contain such codes, so they should
be preserved when read back out. For example, there's a 1 in 281×10^12
(256^6) chance that any six random bytes happen to make the escape code
for red text colour.
import unittest from ..framework import cli_test_suite from .util import DEFAULT_MANAGEMENT_KEY @cli_test_suite def additional_tests(ykman_cli): class Misc(unittest.TestCase): def setUp(self): ykman_cli('piv', 'reset', '-f') def test_info(self): output = ykman_cli('piv', 'info') self.assertIn('PIV version:', output) def test_reset(self): output = ykman_cli('piv', 'reset', '-f') self.assertIn('Success!', output) def test_write_read_object(self): data = 'test data' for i in range(0, 3): ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) data = ykman_cli('piv', 'read-object', '0x5f0001') self.assertEqual(data, 'test data') def test_write_read_preserves_ansi_escapes(self): red = b'\x00\x1b[31m' blue = b'\x00\x1b[34m' reset = b'\x00\x1b[0m' data = (b'Hello, ' + red + b'red' + reset + b' and ' + blue + b'blue' + reset + b' world!') ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) output_data = ykman_cli.with_bytes_output( 'piv', 'read-object', '0x5f0001') self.assertEqual(data, output_data) return [Misc]
<commit_before>import unittest

from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY


@cli_test_suite
def additional_tests(ykman_cli):

    class Misc(unittest.TestCase):

        def setUp(self):
            ykman_cli('piv', 'reset', '-f')

        def test_info(self):
            output = ykman_cli('piv', 'info')
            self.assertIn('PIV version:', output)

        def test_reset(self):
            output = ykman_cli('piv', 'reset', '-f')
            self.assertIn('Success!', output)

        def test_write_read_object(self):
            data = 'test data'
            for i in range(0, 3):
                ykman_cli(
                    'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                    '0x5f0001', '-', input=data)
                data = ykman_cli('piv', 'read-object', '0x5f0001')

            self.assertEqual(data, 'test data')

    return [Misc]
<commit_msg>Test that piv read-object preserves ANSI escape codes

Objects written might (accidentally?) contain such codes, so they should
be preserved when read back out. For example, there's a 1 in 281×10^12
(256^6) chance that any six random bytes happen to make the escape code
for red text colour.<commit_after>
import unittest from ..framework import cli_test_suite from .util import DEFAULT_MANAGEMENT_KEY @cli_test_suite def additional_tests(ykman_cli): class Misc(unittest.TestCase): def setUp(self): ykman_cli('piv', 'reset', '-f') def test_info(self): output = ykman_cli('piv', 'info') self.assertIn('PIV version:', output) def test_reset(self): output = ykman_cli('piv', 'reset', '-f') self.assertIn('Success!', output) def test_write_read_object(self): data = 'test data' for i in range(0, 3): ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) data = ykman_cli('piv', 'read-object', '0x5f0001') self.assertEqual(data, 'test data') def test_write_read_preserves_ansi_escapes(self): red = b'\x00\x1b[31m' blue = b'\x00\x1b[34m' reset = b'\x00\x1b[0m' data = (b'Hello, ' + red + b'red' + reset + b' and ' + blue + b'blue' + reset + b' world!') ykman_cli( 'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001', '-', input=data) output_data = ykman_cli.with_bytes_output( 'piv', 'read-object', '0x5f0001') self.assertEqual(data, output_data) return [Misc]
import unittest

from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY


@cli_test_suite
def additional_tests(ykman_cli):

    class Misc(unittest.TestCase):

        def setUp(self):
            ykman_cli('piv', 'reset', '-f')

        def test_info(self):
            output = ykman_cli('piv', 'info')
            self.assertIn('PIV version:', output)

        def test_reset(self):
            output = ykman_cli('piv', 'reset', '-f')
            self.assertIn('Success!', output)

        def test_write_read_object(self):
            data = 'test data'
            for i in range(0, 3):
                ykman_cli(
                    'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                    '0x5f0001', '-', input=data)
                data = ykman_cli('piv', 'read-object', '0x5f0001')

            self.assertEqual(data, 'test data')

    return [Misc]
Test that piv read-object preserves ANSI escape codes

Objects written might (accidentally?) contain such codes, so they should
be preserved when read back out. For example, there's a 1 in 281×10^12
(256^6) chance that any six random bytes happen to make the escape code
for red text colour.
import unittest

from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY


@cli_test_suite
def additional_tests(ykman_cli):

    class Misc(unittest.TestCase):

        def setUp(self):
            ykman_cli('piv', 'reset', '-f')

        def test_info(self):
            output = ykman_cli('piv', 'info')
            self.assertIn('PIV version:', output)

        def test_reset(self):
            output = ykman_cli('piv', 'reset', '-f')
            self.assertIn('Success!', output)

        def test_write_read_object(self):
            data = 'test data'
            for i in range(0, 3):
                ykman_cli(
                    'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                    '0x5f0001', '-', input=data)
                data = ykman_cli('piv', 'read-object', '0x5f0001')

            self.assertEqual(data, 'test data')

        def test_write_read_preserves_ansi_escapes(self):
            red = b'\x00\x1b[31m'
            blue = b'\x00\x1b[34m'
            reset = b'\x00\x1b[0m'
            data = (b'Hello, ' + red + b'red' + reset + b' and ' + blue
                    + b'blue' + reset + b' world!')
            ykman_cli(
                'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                '0x5f0001', '-', input=data)
            output_data = ykman_cli.with_bytes_output(
                'piv', 'read-object', '0x5f0001')
            self.assertEqual(data, output_data)

    return [Misc]
<commit_before>import unittest

from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY


@cli_test_suite
def additional_tests(ykman_cli):

    class Misc(unittest.TestCase):

        def setUp(self):
            ykman_cli('piv', 'reset', '-f')

        def test_info(self):
            output = ykman_cli('piv', 'info')
            self.assertIn('PIV version:', output)

        def test_reset(self):
            output = ykman_cli('piv', 'reset', '-f')
            self.assertIn('Success!', output)

        def test_write_read_object(self):
            data = 'test data'
            for i in range(0, 3):
                ykman_cli(
                    'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                    '0x5f0001', '-', input=data)
                data = ykman_cli('piv', 'read-object', '0x5f0001')

            self.assertEqual(data, 'test data')

    return [Misc]
<commit_msg>Test that piv read-object preserves ANSI escape codes

Objects written might (accidentally?) contain such codes, so they should
be preserved when read back out. For example, there's a 1 in 281×10^12
(256^6) chance that any six random bytes happen to make the escape code
for red text colour.<commit_after>import unittest

from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY


@cli_test_suite
def additional_tests(ykman_cli):

    class Misc(unittest.TestCase):

        def setUp(self):
            ykman_cli('piv', 'reset', '-f')

        def test_info(self):
            output = ykman_cli('piv', 'info')
            self.assertIn('PIV version:', output)

        def test_reset(self):
            output = ykman_cli('piv', 'reset', '-f')
            self.assertIn('Success!', output)

        def test_write_read_object(self):
            data = 'test data'
            for i in range(0, 3):
                ykman_cli(
                    'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                    '0x5f0001', '-', input=data)
                data = ykman_cli('piv', 'read-object', '0x5f0001')

            self.assertEqual(data, 'test data')

        def test_write_read_preserves_ansi_escapes(self):
            red = b'\x00\x1b[31m'
            blue = b'\x00\x1b[34m'
            reset = b'\x00\x1b[0m'
            data = (b'Hello, ' + red + b'red' + reset + b' and ' + blue
                    + b'blue' + reset + b' world!')
            ykman_cli(
                'piv', 'write-object', '-m', DEFAULT_MANAGEMENT_KEY,
                '0x5f0001', '-', input=data)
            output_data = ykman_cli.with_bytes_output(
                'piv', 'read-object', '0x5f0001')
            self.assertEqual(data, output_data)

    return [Misc]
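A quick sanity check of the odds quoted in the commit message above, assuming six uniformly random, independent bytes — pure arithmetic, no YubiKey hardware needed:

# One specific six-byte marker such as b'\x00\x1b[31m' is a single value
# out of 256**6 possible six-byte strings.
n = 256 ** 6
print(n)                 # 281474976710656, i.e. roughly 281 * 10**12
print("%.2e" % (1 / n))  # ~3.55e-15 probability for six random bytes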
bd9faff14b7ed7aa87b51c93346c0b7349430d64
apps/domain/tests/conftest.py
apps/domain/tests/conftest.py
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db from main.core.node import GridDomain @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture def domain(): return GridDomain(name="testing") @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db
ADD a new fixture (GridDomain instance)
ADD a new fixture (GridDomain instance)
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db ADD a new fixture (GridDomain instance)
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db from main.core.node import GridDomain @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture def domain(): return GridDomain(name="testing") @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db
<commit_before>import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db <commit_msg>ADD a new fixture (GridDomain instance)<commit_after>
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db from main.core.node import GridDomain @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture def domain(): return GridDomain(name="testing") @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db
import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db ADD a new fixture (GridDomain instance)import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db from main.core.node import GridDomain @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture def domain(): return GridDomain(name="testing") @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db
<commit_before>import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db <commit_msg>ADD a new fixture (GridDomain instance)<commit_after>import pytest import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.append(myPath + "/../src/") from app import create_app from main.core.database import db from main.core.node import GridDomain @pytest.fixture(scope="function", autouse=True) def app(): db_path = "sqlite:///:memory:" return create_app(debug=True, test_config={"SQLALCHEMY_DATABASE_URI": db_path}) @pytest.fixture def client(app): return app.test_client() @pytest.fixture def domain(): return GridDomain(name="testing") @pytest.fixture(scope="function") def database(app): test_db = db test_db.init_app(app) app.app_context().push() test_db.create_all() return test_db
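For context, a test consuming the new fixture might look like the sketch below. pytest injects the fixture by parameter name; test_domain_name is hypothetical and the name attribute check is an assumption mirroring the constructor argument, not documented PySyft API:

# Hypothetical usage of the new fixture added in this commit.
def test_domain_name(domain):
    assert domain.name == "testing"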
bad8133c6714a25ad764419302f4db0da3f39952
spec_cleaner/rpminstall.py
spec_cleaner/rpminstall.py
# vim: set ts=4 sw=4 et: coding=UTF-8 import string from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = string.replace(line, '%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line
# vim: set ts=4 sw=4 et: coding=UTF-8 from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = line.replace('%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line
Fix test failures on py3.
Fix test failures on py3.
Python
bsd-3-clause
plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner
# vim: set ts=4 sw=4 et: coding=UTF-8 import string from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = string.replace(line, '%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line Fix test failures on py3.
# vim: set ts=4 sw=4 et: coding=UTF-8 from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = line.replace('%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line
<commit_before># vim: set ts=4 sw=4 et: coding=UTF-8 import string from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = string.replace(line, '%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line <commit_msg>Fix test failures on py3.<commit_after>
# vim: set ts=4 sw=4 et: coding=UTF-8 from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = line.replace('%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line
# vim: set ts=4 sw=4 et: coding=UTF-8 import string from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = string.replace(line, '%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line Fix test failures on py3.# vim: set ts=4 sw=4 et: coding=UTF-8 from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = line.replace('%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line
<commit_before># vim: set ts=4 sw=4 et: coding=UTF-8 import string from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = string.replace(line, '%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line <commit_msg>Fix test failures on py3.<commit_after># vim: set ts=4 sw=4 et: coding=UTF-8 from rpmsection import Section class RpmInstall(Section): ''' Remove commands that wipe out the build root. Replace %makeinstall (suse-ism). ''' def add(self, line): install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}' line = self._complete_cleanup(line) line = self._replace_remove_la(line) # we do not want to cleanup buildroot, it is already clean if self.reg.re_clean.search(line): return # do not use install macros as we have trouble with it for now # we can convert it later on if self.reg.re_install.match(line): line = install_command # we can deal with additional params for %makeinstall so replace that too line = line.replace('%{makeinstall}', install_command) Section.add(self, line) def _replace_remove_la(self, line): """ Replace all known variations of la file deletion with one unified """ if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \ (self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2): line = 'find %{buildroot} -type f -name "*.la" -delete -print' return line
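The failure this commit fixes is that Python 3 dropped the function form string.replace(); only the str method survives. A minimal, standalone demonstration (independent of spec-cleaner):

import string

line = 'make %{makeinstall}'
# The method form works on both Python 2 and Python 3.
print(line.replace('%{makeinstall}', 'make DESTDIR=%{buildroot} install'))
# The function form exists only on Python 2; calling string.replace(...)
# on Python 3 raises AttributeError.
print(hasattr(string, 'replace'))  # True on Python 2, False on Python 3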
619fa5a8345a32eb2913b85f01b9d2bc8453b688
sms_939/controllers/sms_notification_controller.py
sms_939/controllers/sms_notification_controller.py
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2018 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: Emanuel Cino <ecino@compassion.ch> # # The licence is in the file __manifest__.py # ############################################################################## import logging import json from odoo import http, tools from odoo.http import request from ..tools import SmsNotificationAnswer async = not tools.config.get('test_enable') _logger = logging.getLogger(__name__) class RestController(http.Controller): @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'], csrf=False) def sms_notification(self, **parameters): _logger.info("SMS Request received : {}".format( json.dumps(parameters))) notification_env = request.env['sms.notification'].sudo() (notification_env.with_delay() if async else notification_env) \ .send_sms_answer(parameters) return SmsNotificationAnswer([], costs=[]).get_answer()
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2018 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: Emanuel Cino <ecino@compassion.ch> # # The licence is in the file __manifest__.py # ############################################################################## import logging import json from odoo import http, tools from odoo.http import request from ..tools import SmsNotificationAnswer async = not tools.config.get('test_enable') _logger = logging.getLogger(__name__) class RestController(http.Controller): @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'], csrf=False) def sms_notification(self, **parameters): _logger.info("SMS Request received : {}".format( json.dumps(parameters))) notification_env = request.env['sms.notification'].sudo() (notification_env.with_delay(priority=1) if async else notification_env).send_sms_answer(parameters) return SmsNotificationAnswer([], costs=[]).get_answer()
Put SMS answer job at very high priority
Put SMS answer job at very high priority
Python
agpl-3.0
CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
#    Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <ecino@compassion.ch>
#
#    The licence is in the file __manifest__.py
#
##############################################################################
import logging
import json

from odoo import http, tools
from odoo.http import request

from ..tools import SmsNotificationAnswer

async = not tools.config.get('test_enable')

_logger = logging.getLogger(__name__)


class RestController(http.Controller):

    @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'],
                csrf=False)
    def sms_notification(self, **parameters):
        _logger.info("SMS Request received : {}".format(
            json.dumps(parameters)))
        notification_env = request.env['sms.notification'].sudo()
        (notification_env.with_delay() if async else notification_env) \
            .send_sms_answer(parameters)
        return SmsNotificationAnswer([], costs=[]).get_answer()
Put SMS answer job at very high priority
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2018 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: Emanuel Cino <ecino@compassion.ch> # # The licence is in the file __manifest__.py # ############################################################################## import logging import json from odoo import http, tools from odoo.http import request from ..tools import SmsNotificationAnswer async = not tools.config.get('test_enable') _logger = logging.getLogger(__name__) class RestController(http.Controller): @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'], csrf=False) def sms_notification(self, **parameters): _logger.info("SMS Request received : {}".format( json.dumps(parameters))) notification_env = request.env['sms.notification'].sudo() (notification_env.with_delay(priority=1) if async else notification_env).send_sms_answer(parameters) return SmsNotificationAnswer([], costs=[]).get_answer()
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
#    Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <ecino@compassion.ch>
#
#    The licence is in the file __manifest__.py
#
##############################################################################
import logging
import json

from odoo import http, tools
from odoo.http import request

from ..tools import SmsNotificationAnswer

async = not tools.config.get('test_enable')

_logger = logging.getLogger(__name__)


class RestController(http.Controller):

    @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'],
                csrf=False)
    def sms_notification(self, **parameters):
        _logger.info("SMS Request received : {}".format(
            json.dumps(parameters)))
        notification_env = request.env['sms.notification'].sudo()
        (notification_env.with_delay() if async else notification_env) \
            .send_sms_answer(parameters)
        return SmsNotificationAnswer([], costs=[]).get_answer()
<commit_msg>Put SMS answer job at very high priority<commit_after>
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2018 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: Emanuel Cino <ecino@compassion.ch> # # The licence is in the file __manifest__.py # ############################################################################## import logging import json from odoo import http, tools from odoo.http import request from ..tools import SmsNotificationAnswer async = not tools.config.get('test_enable') _logger = logging.getLogger(__name__) class RestController(http.Controller): @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'], csrf=False) def sms_notification(self, **parameters): _logger.info("SMS Request received : {}".format( json.dumps(parameters))) notification_env = request.env['sms.notification'].sudo() (notification_env.with_delay(priority=1) if async else notification_env).send_sms_answer(parameters) return SmsNotificationAnswer([], costs=[]).get_answer()
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
#    Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <ecino@compassion.ch>
#
#    The licence is in the file __manifest__.py
#
##############################################################################
import logging
import json

from odoo import http, tools
from odoo.http import request

from ..tools import SmsNotificationAnswer

async = not tools.config.get('test_enable')

_logger = logging.getLogger(__name__)


class RestController(http.Controller):

    @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'],
                csrf=False)
    def sms_notification(self, **parameters):
        _logger.info("SMS Request received : {}".format(
            json.dumps(parameters)))
        notification_env = request.env['sms.notification'].sudo()
        (notification_env.with_delay() if async else notification_env) \
            .send_sms_answer(parameters)
        return SmsNotificationAnswer([], costs=[]).get_answer()
Put SMS answer job at very high priority
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
#    Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <ecino@compassion.ch>
#
#    The licence is in the file __manifest__.py
#
##############################################################################
import logging
import json

from odoo import http, tools
from odoo.http import request

from ..tools import SmsNotificationAnswer

async = not tools.config.get('test_enable')

_logger = logging.getLogger(__name__)


class RestController(http.Controller):

    @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'],
                csrf=False)
    def sms_notification(self, **parameters):
        _logger.info("SMS Request received : {}".format(
            json.dumps(parameters)))
        notification_env = request.env['sms.notification'].sudo()
        (notification_env.with_delay(priority=1) if async
         else notification_env).send_sms_answer(parameters)
        return SmsNotificationAnswer([], costs=[]).get_answer()
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
#    Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <ecino@compassion.ch>
#
#    The licence is in the file __manifest__.py
#
##############################################################################
import logging
import json

from odoo import http, tools
from odoo.http import request

from ..tools import SmsNotificationAnswer

async = not tools.config.get('test_enable')

_logger = logging.getLogger(__name__)


class RestController(http.Controller):

    @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'],
                csrf=False)
    def sms_notification(self, **parameters):
        _logger.info("SMS Request received : {}".format(
            json.dumps(parameters)))
        notification_env = request.env['sms.notification'].sudo()
        (notification_env.with_delay() if async else notification_env) \
            .send_sms_answer(parameters)
        return SmsNotificationAnswer([], costs=[]).get_answer()
<commit_msg>Put SMS answer job at very high priority<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
#    Releasing children from poverty in Jesus' name
#    @author: Emanuel Cino <ecino@compassion.ch>
#
#    The licence is in the file __manifest__.py
#
##############################################################################
import logging
import json

from odoo import http, tools
from odoo.http import request

from ..tools import SmsNotificationAnswer

async = not tools.config.get('test_enable')

_logger = logging.getLogger(__name__)


class RestController(http.Controller):

    @http.route('/sms/mnc/', type='http', auth='public', methods=['GET'],
                csrf=False)
    def sms_notification(self, **parameters):
        _logger.info("SMS Request received : {}".format(
            json.dumps(parameters)))
        notification_env = request.env['sms.notification'].sudo()
        (notification_env.with_delay(priority=1) if async
         else notification_env).send_sms_answer(parameters)
        return SmsNotificationAnswer([], costs=[]).get_answer()
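A rough sketch of what priority=1 buys here, assuming the job queue drains like a min-heap on the priority number (lower runs sooner, with ordinary jobs at a higher default); the queue internals below are simplified for illustration, not the actual connector implementation:

import heapq

jobs = []
heapq.heappush(jobs, (10, 'routine background job'))  # assumed default priority
heapq.heappush(jobs, (1, 'send_sms_answer'))          # the SMS reply queued above
print(heapq.heappop(jobs)[1])  # 'send_sms_answer' comes off the queue first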
bd26414b71497bbf39e40bd3b676cc345880b5dd
byceps/services/image/service.py
byceps/services/image/service.py
""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type ALL_IMAGE_TYPES = frozenset(ImageType) # type: FrozenSet[ImageType] class ImageTypeProhibited(ValueError): pass def get_all_image_types() -> FrozenSet[ImageType]: """Return all known image types.""" return ALL_IMAGE_TYPES def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions
""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type class ImageTypeProhibited(ValueError): pass def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions
Remove constant and function that list all existing image types
Remove constant and function that list all existing image types This allows having additional image types for (temporarily) internal purposes without accidentally exposing them.
Python
bsd-3-clause
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type ALL_IMAGE_TYPES = frozenset(ImageType) # type: FrozenSet[ImageType] class ImageTypeProhibited(ValueError): pass def get_all_image_types() -> FrozenSet[ImageType]: """Return all known image types.""" return ALL_IMAGE_TYPES def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions Remove constant and function that list all existing image types This allows having additional image types for (temporarily) internal purposes without accidentally exposing them.
""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type class ImageTypeProhibited(ValueError): pass def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions
<commit_before>""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type ALL_IMAGE_TYPES = frozenset(ImageType) # type: FrozenSet[ImageType] class ImageTypeProhibited(ValueError): pass def get_all_image_types() -> FrozenSet[ImageType]: """Return all known image types.""" return ALL_IMAGE_TYPES def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions <commit_msg>Remove constant and function that list all existing image types This allows having additional image types for (temporarily) internal purposes without accidentally exposing them.<commit_after>
""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type class ImageTypeProhibited(ValueError): pass def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions
""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type ALL_IMAGE_TYPES = frozenset(ImageType) # type: FrozenSet[ImageType] class ImageTypeProhibited(ValueError): pass def get_all_image_types() -> FrozenSet[ImageType]: """Return all known image types.""" return ALL_IMAGE_TYPES def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions Remove constant and function that list all existing image types This allows having additional image types for (temporarily) internal purposes without accidentally exposing them.""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type class ImageTypeProhibited(ValueError): pass def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions
<commit_before>""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type ALL_IMAGE_TYPES = frozenset(ImageType) # type: FrozenSet[ImageType] class ImageTypeProhibited(ValueError): pass def get_all_image_types() -> FrozenSet[ImageType]: """Return all known image types.""" return ALL_IMAGE_TYPES def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions <commit_msg>Remove constant and function that list all existing image types This allows having additional image types for (temporarily) internal purposes without accidentally exposing them.<commit_after>""" byceps.services.image.service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from typing import BinaryIO, FrozenSet, Iterable, Set from ...util.image import read_dimensions from ...util.image.models import Dimensions, ImageType from ...util.image.typeguess import guess_type class ImageTypeProhibited(ValueError): pass def get_image_type_names(types: Iterable[ImageType]) -> FrozenSet[str]: """Return the names of the image types.""" return frozenset(t.name.upper() for t in types) def determine_image_type(stream: BinaryIO, allowed_types: Set[ImageType]) \ -> ImageType: """Extract image type from stream.""" image_type = guess_type(stream) if image_type not in allowed_types: allowed_type_names = get_image_type_names(allowed_types) allowed_type_names_string = ', '.join(sorted(allowed_type_names)) raise ImageTypeProhibited( 'Image is not one of the allowed types ({}).' .format(allowed_type_names_string)) stream.seek(0) return image_type def determine_dimensions(stream: BinaryIO) -> Dimensions: """Extract image dimensions from stream.""" dimensions = read_dimensions(stream) stream.seek(0) return dimensions
ef69cad1175fa92543fce085cd46a9ec990fa55b
nbresuse/__init__.py
nbresuse/__init__.py
from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    route_pattern = url_path_join(nbapp.web_app.settings['base_url'], '/api/nbresuse/v1')
    nbapp.web_app.add_handlers('.*', [(route_pattern, ApiHandler)])
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()

from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    base_url = nbapp.web_app.settings["base_url"]
    nbapp.web_app.add_handlers(
        ".*",
        [
            (url_path_join(base_url, "/api/nbresuse/v1"), ApiHandler),
            (url_path_join(base_url, "/metrics"), ApiHandler),
        ],
    )
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()
Add back the /metrics endpoint
Add back the /metrics endpoint
Python
bsd-2-clause
yuvipanda/nbresuse,yuvipanda/nbresuse
8b1c229aa3891ca80c88f3514d9c7014cf7909fc
src/epiweb/apps/survey/views.py
src/epiweb/apps/survey/views.py
# -*- coding: utf-8 -*-

from django import forms
from django.template import Context, loader
from django.http import HttpResponse

from epiweb.apps.survey import utils
from epiweb.apps.survey.data import example


def create_field(item):
    if item['type'] == 'yes-no':
        field = forms.ChoiceField(widget=forms.RadioSelect,
                    choices=[('yes', _('Yes')), ('no', _('No'))])
    elif item['type'] == 'option-multiple':
        field = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,
                    choices=zip(range(0, len(item['options'])), item['options']))
    elif item['type'] == 'option-single':
        field = forms.ChoiceField(widget=forms.RadioSelect,
                    choices=zip(range(0, len(item['options'])), item['options']))
    elif item['type'] == 'date':
        field = forms.DateField(input_formats='%m/%d/%y')
    else:
        field = forms.CharField()

    field.label = item.get('label', None)
    field.required = False

    return field


def create_form(data, values=None):
    if values:
        f = forms.Form(values)
    else:
        f = forms.Form()

    for item in data:
        f.fields[item['id']] = create_field(item)

    return f


def index(request):
    if request.method == 'POST':
        form = utils.generate_form(example.data.sections[0], request.POST)
    else:
        form = utils.generate_form(example.data.sections[0])

    t = loader.get_template('survey/index.html')
    c = Context({
        'form': form
    })
    return HttpResponse(t.render(c))


def survey(request, survey_id, page=None):
    html = "survey_id=%s, page=%s" % (survey_id, page)
    return HttpResponse(html)

# -*- coding: utf-8 -*-

from django import forms
from django.template import Context, loader
from django.http import HttpResponse

from epiweb.apps.survey import utils
from epiweb.apps.survey.data import example


def index(request):
    if request.method == 'POST':
        form = utils.generate_form(example.data.sections[0], request.POST)
    else:
        form = utils.generate_form(example.data.sections[0])

    t = loader.get_template('survey/index.html')
    c = Context({
        'form': form
    })
    return HttpResponse(t.render(c))


def survey(request, survey_id, page=None):
    html = "survey_id=%s, page=%s" % (survey_id, page)
    return HttpResponse(html)
Remove form generator from the view.
Remove form generator from the view.
Python
agpl-3.0
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
8b4beb13a964a2822d1b5caf8c54f8201e802a2b
yolk/__init__.py
yolk/__init__.py
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.7'
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.8'
Increment patch version to 0.8.8
Increment patch version to 0.8.8
Python
bsd-3-clause
myint/yolk,myint/yolk
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.7' Increment patch version to 0.8.8
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.8'
<commit_before>"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.7' <commit_msg>Increment patch version to 0.8.8<commit_after>
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.8'
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.7' Increment patch version to 0.8.8"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.8'
<commit_before>"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.7' <commit_msg>Increment patch version to 0.8.8<commit_after>"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.8.8'
580b4817360b6ac8372f69b57a3ddcab2480ab70
docs/examples/tutorial/cython_tutorial/primes_cpp.py
docs/examples/tutorial/cython_tutorial/primes_cpp.py
# distutils: language=c++

import cython
from cython.cimports.libcpp.vector import vector


def primes(nb_primes: cython.uint):
    i: cython.int
    p: vector[cython.int]
    p.reserve(nb_primes)  # allocate memory for 'nb_primes' elements.

    n: cint = 2
    while p.size() < nb_primes:  # size() for vectors is similar to len()
        for i in p:
            if n % i == 0:
                break
        else:
            p.push_back(n)  # push_back is similar to append()
        n += 1

    # If possible, C values and C++ objects are automatically
    # converted to Python objects at need.
    return p  # so here, the vector will be copied into a Python list.

# distutils: language=c++

import cython
from cython.cimports.libcpp.vector import vector


def primes(nb_primes: cython.uint):
    i: cython.int
    p: vector[cython.int]
    p.reserve(nb_primes)  # allocate memory for 'nb_primes' elements.

    n: cython.int = 2
    while p.size() < nb_primes:  # size() for vectors is similar to len()
        for i in p:
            if n % i == 0:
                break
        else:
            p.push_back(n)  # push_back is similar to append()
        n += 1

    # If possible, C values and C++ objects are automatically
    # converted to Python objects at need.
    return p  # so here, the vector will be copied into a Python list.
Fix type used in C++ example.
docs: Fix type used in C++ example.
Python
apache-2.0
cython/cython,cython/cython,da-woods/cython,scoder/cython,scoder/cython,cython/cython,scoder/cython,scoder/cython,da-woods/cython,da-woods/cython,da-woods/cython,cython/cython
cba49af7fce05eb22fda3012f23c8fa8736fd022
polling_stations/apps/pollingstations/migrations/0009_customfinder.py
polling_stations/apps/pollingstations/migrations/0009_customfinder.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0006_residentialaddress_slug'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomFinder',
            fields=[
                ('area_code', models.CharField(serialize=False, max_length=9, primary_key=True)),
                ('base_url', models.CharField(max_length=255, blank=True)),
                ('can_pass_postcode', models.BooleanField(default=False)),
                ('message', models.TextField(blank=True)),
            ],
        ),
    ]

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0008_auto_20160415_1854'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomFinder',
            fields=[
                ('area_code', models.CharField(serialize=False, max_length=9, primary_key=True)),
                ('base_url', models.CharField(max_length=255, blank=True)),
                ('can_pass_postcode', models.BooleanField(default=False)),
                ('message', models.TextField(blank=True)),
            ],
        ),
    ]
Edit migration so it depends on 0008_auto_20160415_1854
Edit migration so it depends on 0008_auto_20160415_1854

Ensure the migrations will apply correctly without conflict once merged

Merging this branch is now blocked on PR #239
Python
bsd-3-clause
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
361c0293085a688cc60e06b675eca66c9c52d72e
pi_approach/Distance_Pi/recv.py
pi_approach/Distance_Pi/recv.py
# Lidar Project Distance Subsystem
import serial
import socket
import time

arduino_dist = serial.Serial('/dev/ttyUSB0',9600)

def get_distance():
    distance = arduino_dist.readline()
    return distance

class Client(object):
    """A class that uses sockets to connect to a server"""
    HOST = "userinterface.local"
    PORT = 12345
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def socket_connection(self):
        Client.s.connect((Client.HOST, Client.PORT))

    def receive_data(self):
        data = Client.s.recv(4096)
        return data

    def send_data(self, data):
        Client.s.send(data)

client = Client()
client.socket_connection()

while True:
    client.send_data("Hello! Yo!")
    time.sleep(1)

# Lidar Project Distance Subsystem
import serial
import socket
import time
import sys
sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries")
import serverxclient

arduino_dist = serial.Serial('/dev/ttyUSB0',9600)

def get_distance():
    distance = arduino_dist.readline()
    return distance

client = Client()
client.socket_connection()

while True:
    client.send_data("Hello! Yo!")
    time.sleep(1)
Remove local use of client, instead rely on imported library
Remove local use of client, instead rely on imported library
Python
mit
the-raspberry-pi-guy/lidar
0cc12b24ec4aac88380a36bb519bfc78ad81b277
run_job.py
run_job.py
#!/usr/bin/env python
#
# Syntax: ./run_job <session-id>
#
# It should be run with the current working directory set properly
#
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap

data = json.loads(sys.stdin.read())

session_id = sys.argv[1]
session = Session.load(session_id)

run_info = data['run_info']
Bootstrap.run(session, data['build_id'], data['job_server'],
              run_info['step_fun'],
              args = run_info['args'],
              kwargs = run_info['kwargs'],
              env = run_info['env'])

#!/usr/bin/env python
#
# Syntax: ./run_job <session-id>
#
# It should be run with the current working directory set properly
#
import sys, json
from sci.session import Session
from sci.bootstrap import Bootstrap

data = json.loads(sys.stdin.read())

session_id = sys.argv[1]
session = Session.load(session_id)

run_info = data['run_info'] or {}
Bootstrap.run(session, data['build_id'], data['job_server'],
              run_info.get('step_fun'),
              args = run_info.get('args', []),
              kwargs = run_info.get('kwargs', {}),
              env = run_info.get('env'))
Support when run_info is not specified
Support when run_info is not specified

That is the case when starting a build (not running a step)
Python
apache-2.0
boivie/sci,boivie/sci
2b60161118c2407c9bff736710b4f1a4b62a7468
scipy/constants/tests/test_codata.py
scipy/constants/tests/test_codata.py
import warnings

from scipy.constants import find
from numpy.testing import assert_equal, run_module_suite


def test_find():
    warnings.simplefilter('ignore', DeprecationWarning)

    keys = find('weak mixing', disp=False)
    assert_equal(keys, ['weak mixing angle'])

    keys = find('qwertyuiop', disp=False)
    assert_equal(keys, [])

    keys = find('natural unit', disp=False)
    assert_equal(keys, sorted(['natural unit of velocity',
                               'natural unit of action',
                               'natural unit of action in eV s',
                               'natural unit of mass',
                               'natural unit of energy',
                               'natural unit of energy in MeV',
                               'natural unit of momentum',
                               'natural unit of momentum in MeV/c',
                               'natural unit of length',
                               'natural unit of time']))


if __name__ == "__main__":
    run_module_suite()

import warnings

import codata
import constants

from scipy.constants import find
from numpy.testing import assert_equal, run_module_suite


def test_find():
    warnings.simplefilter('ignore', DeprecationWarning)

    keys = find('weak mixing', disp=False)
    assert_equal(keys, ['weak mixing angle'])

    keys = find('qwertyuiop', disp=False)
    assert_equal(keys, [])

    keys = find('natural unit', disp=False)
    assert_equal(keys, sorted(['natural unit of velocity',
                               'natural unit of action',
                               'natural unit of action in eV s',
                               'natural unit of mass',
                               'natural unit of energy',
                               'natural unit of energy in MeV',
                               'natural unit of momentum',
                               'natural unit of momentum in MeV/c',
                               'natural unit of length',
                               'natural unit of time']))


def test_basic_table_parse():
    c = 'speed of light in vacuum'
    assert_equal(codata.value(c), constants.c)
    assert_equal(codata.value(c), constants.speed_of_light)


def test_basic_lookup():
    assert_equal('%d %s' % (codata.c, codata.unit('speed of light in vacuum')),
                 '299792458 m s^-1')


if __name__ == "__main__":
    run_module_suite()
Add very basic tests for codata and constants.
ENH: Add very basic tests for codata and constants.

git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@6563 d6536bca-fef9-0310-8506-e4c0a848fbcf
Python
bsd-3-clause
lesserwhirls/scipy-cwt,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,jasonmccampbell/scipy-refactor
8842bbf45ffe2a76832075e053dce90a95964bcd
Bookie/bookie/tests/__init__.py
Bookie/bookie/tests/__init__.py
import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings)
import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() # we need to pull the right ini for the test we want to run # by default pullup test.ini, but we might want to test mysql, pgsql, etc test_ini = os.environ.get('BOOKIE_TEST_INI', None) if test_ini: ini.read(test_ini) else: ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings)
Add ability to set test ini via env variable
Add ability to set test ini via env variable
Python
agpl-3.0
charany1/Bookie,teodesson/Bookie,skmezanul/Bookie,teodesson/Bookie,skmezanul/Bookie,adamlincoln/Bookie,adamlincoln/Bookie,adamlincoln/Bookie,GreenLunar/Bookie,bookieio/Bookie,pombredanne/Bookie,wangjun/Bookie,adamlincoln/Bookie,pombredanne/Bookie,pombredanne/Bookie,skmezanul/Bookie,bookieio/Bookie,charany1/Bookie,GreenLunar/Bookie,wangjun/Bookie,bookieio/Bookie,bookieio/Bookie,GreenLunar/Bookie,charany1/Bookie,skmezanul/Bookie,GreenLunar/Bookie,teodesson/Bookie,wangjun/Bookie,wangjun/Bookie,teodesson/Bookie
import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings) Add ability to set test ini via env variable
import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() # we need to pull the right ini for the test we want to run # by default pullup test.ini, but we might want to test mysql, pgsql, etc test_ini = os.environ.get('BOOKIE_TEST_INI', None) if test_ini: ini.read(test_ini) else: ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings)
<commit_before>import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings) <commit_msg>Add ability to set test ini via env variable<commit_after>
import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() # we need to pull the right ini for the test we want to run # by default pullup test.ini, but we might want to test mysql, pgsql, etc test_ini = os.environ.get('BOOKIE_TEST_INI', None) if test_ini: ini.read(test_ini) else: ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings)
import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings) Add ability to set test ini via env variableimport ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() # we need to pull the right ini for the test we want to run # by default pullup test.ini, but we might want to test mysql, pgsql, etc test_ini = os.environ.get('BOOKIE_TEST_INI', None) if test_ini: ini.read(test_ini) else: ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings)
<commit_before>import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings) <commit_msg>Add ability to set test ini via env variable<commit_after>import ConfigParser import os import unittest from pyramid.config import Configurator from pyramid import testing global_config = {} ini = ConfigParser.ConfigParser() # we need to pull the right ini for the test we want to run # by default pullup test.ini, but we might want to test mysql, pgsql, etc test_ini = os.environ.get('BOOKIE_TEST_INI', None) if test_ini: ini.read(test_ini) else: ini.read('test.ini') settings = dict(ini.items('app:bookie')) def setup_db(settings): """ We need to create the test sqlite database to run our tests against If the db exists, remove it We're using the SA-Migrations API to create the db and catch it up to the latest migration level for testing In theory, we could use this API to do version specific testing as well if we needed to. If we want to run any tests with a fresh db we can call this function """ from migrate.versioning import api as mig sa_url = settings['sqlalchemy.url'] migrate_repository = 'migrations' # we're hackish here since we're going to assume the test db is whatever is # after the last slash of the SA url sqlite:///somedb.db db_name = sa_url[sa_url.rindex('/') + 1:] try: os.remove(db_name) except: pass open(db_name, 'w').close() mig.version_control(sa_url, migrate_repository) mig.upgrade(sa_url, migrate_repository) setup_db(settings)
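The Bookie commit above selects the test configuration from an environment variable with a fallback. A minimal sketch of the same pattern, assuming only the BOOKIE_TEST_INI name and test.ini default taken from the record (the helper itself is illustrative):

import os

def pick_ini_path(env_var='BOOKIE_TEST_INI', default='test.ini'):
    # os.environ.get returns None when the variable is unset; the `or`
    # additionally maps an empty value back to the default.
    return os.environ.get(env_var) or default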
0ae8cb79ef13ac652edca3a29825436c8c2d6cd8
SessionManager.py
SessionManager.py
import sublime import sublime_plugin from .modules import messages from .modules import settings class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): pass def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data())
import sublime import sublime_plugin from .modules import messages from .modules import serialize from .modules import settings from .modules.session import Session class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): session = Session.save(session_name, sublime.windows()) serialize.dump(session_name, session) def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data())
Save the session on SaveSession
Save the session on SaveSession
Python
mit
Zeeker/sublime-SessionManager
import sublime import sublime_plugin from .modules import messages from .modules import settings class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): pass def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data()) Save the session on SaveSession
import sublime import sublime_plugin from .modules import messages from .modules import serialize from .modules import settings from .modules.session import Session class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): session = Session.save(session_name, sublime.windows()) serialize.dump(session_name, session) def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data())
<commit_before>import sublime import sublime_plugin from .modules import messages from .modules import settings class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): pass def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data()) <commit_msg>Save the session on SaveSession<commit_after>
import sublime import sublime_plugin from .modules import messages from .modules import serialize from .modules import settings from .modules.session import Session class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): session = Session.save(session_name, sublime.windows()) serialize.dump(session_name, session) def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data())
import sublime import sublime_plugin from .modules import messages from .modules import settings class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): pass def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data()) Save the session on SaveSessionimport sublime import sublime_plugin from .modules import messages from .modules import serialize from .modules import settings from .modules.session import Session class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): session = Session.save(session_name, sublime.windows()) serialize.dump(session_name, session) def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data())
<commit_before>import sublime import sublime_plugin from .modules import messages from .modules import settings class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): pass def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data()) <commit_msg>Save the session on SaveSession<commit_after>import sublime import sublime_plugin from .modules import messages from .modules import serialize from .modules import settings from .modules.session import Session class SaveSession(sublime_plugin.ApplicationCommand): def run(self): settings.load() sublime.active_window().show_input_panel( messages.dialog("session_name"), self.generate_name(), on_done=self.save_session, on_change=None, on_cancel=None ) def generate_name(self): return "placeholder" def save_session(self, session_name): session = Session.save(session_name, sublime.windows()) serialize.dump(session_name, session) def is_enabled(self): windows = sublime.windows() for window in windows: if is_saveable(window): return True return False def is_saveable(window): return bool(window.views()) or bool(window.project_data())
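The SessionManager commit calls Session.save(...) and then serialize.dump(...). The plugin's serialize module is not shown in the record, so the following JSON-based dump is an assumption about its shape, not the actual implementation:

import json
import os

def dump(session_name, session_data, folder='sessions'):
    # Hypothetical helper: writes one JSON file per named session.
    os.makedirs(folder, exist_ok=True)
    path = os.path.join(folder, session_name + '.json')
    with open(path, 'w') as handle:
        json.dump(session_data, handle, indent=2)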
d650cbe26ce0fcc4c5146466d2827b930c153b0f
PlatformPhysicsOperation.py
PlatformPhysicsOperation.py
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
Use GroupedOperation for merging PlatformPhysicsOperation
Use GroupedOperation for merging PlatformPhysicsOperation
Python
agpl-3.0
senttech/Cura,quillford/Cura,fxtentacle/Cura,hmflash/Cura,totalretribution/Cura,ynotstartups/Wanhao,bq/Ultimaker-Cura,lo0ol/Ultimaker-Cura,lo0ol/Ultimaker-Cura,DeskboxBrazil/Cura,derekhe/Cura,fxtentacle/Cura,fieldOfView/Cura,quillford/Cura,fieldOfView/Cura,derekhe/Cura,senttech/Cura,totalretribution/Cura,DeskboxBrazil/Cura,Curahelper/Cura,hmflash/Cura,bq/Ultimaker-Cura,ad1217/Cura,ynotstartups/Wanhao,ad1217/Cura,markwal/Cura,Curahelper/Cura,markwal/Cura
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False Use GroupedOperation for merging PlatformPhysicsOperation
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
<commit_before>from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False <commit_msg>Use GroupedOperation for merging PlatformPhysicsOperation<commit_after>
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False Use GroupedOperation for merging PlatformPhysicsOperationfrom UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
<commit_before>from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False <commit_msg>Use GroupedOperation for merging PlatformPhysicsOperation<commit_after>from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
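The Cura commit merges operations by wrapping them in a GroupedOperation. A minimal sketch of that idea — children redo in order and undo in reverse — which is illustrative and not the actual UM.Operations.GroupedOperation code:

class GroupedOperation:
    def __init__(self):
        self._children = []

    def addOperation(self, op):
        self._children.append(op)

    def redo(self):
        for op in self._children:
            op.redo()

    def undo(self):
        # Undo must unwind in the opposite order of redo.
        for op in reversed(self._children):
            op.undo()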
d77fcfc212b81c2935a2de9b712af5b6f8c43ee1
server/mlabns/tests/test_distance.py
server/mlabns/tests/test_distance.py
import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main()
import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main()
Update indentation as per style guide
Update indentation as per style guide
Python
apache-2.0
fernandalavalle/mlab-ns,m-lab/mlab-ns,fernandalavalle/mlab-ns,m-lab/mlab-ns,m-lab/mlab-ns,fernandalavalle/mlab-ns,m-lab/mlab-ns,fernandalavalle/mlab-ns
import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main() Update indentation as per style guide
import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main()
<commit_before>import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main() <commit_msg>Update indentation as per style guide<commit_after>
import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main()
import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main() Update indentation as per style guideimport unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main()
<commit_before>import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main() <commit_msg>Update indentation as per style guide<commit_after>import unittest2 from mlabns.util import distance class DistanceTestCase(unittest2.TestCase): def testValidSmallDistance(self): dist = distance.distance(0, 0, 10, 10) self.assertEqual(1568.5205567985761, dist) def testValidLargeDistance(self): dist = distance.distance(20, 20, 100, 100) self.assertEqual(8009.5721050828461, dist) def testInvalidInputs(self): import math from numbers import Number dist = 0 try: dist = distance.distance(-700,1000,999,-5454) except Exception: self.fail("distance threw an exception on invalid entry") self.assertTrue(isinstance(dist, Number)) self.assertFalse(math.isnan(dist)) if __name__ == '__main__': unittest2.main()
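The mlab-ns test above checks that distance() returns a real, non-NaN number even for out-of-range coordinates. On Python 3 the same predicate can be written more compactly; a hedged sketch (the function name is illustrative):

import math

def is_usable_distance(value):
    # math.isfinite rejects NaN and +/-inf in one call; the isinstance
    # guard covers non-numeric return values.
    return isinstance(value, (int, float)) and math.isfinite(value)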
9024dcd1da124acb7ced918379f923d8be85acb7
src/htrun/host_tests_runner/__init__.py
src/htrun/host_tests_runner/__init__.py
# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("greentea").version
# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("mbed-greentea").version
Fix package name passed to setuptools
htrun: Fix package name passed to setuptools We passed the package name `greentea` to setuptools to discover the current package version. This failed because the package name is actually `mbed-greentea`.
Python
apache-2.0
ARMmbed/greentea
# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("greentea").version htrun: Fix package name passed to setuptools We passed the package name `greentea` to setuptools to discover the current package version. This failed because the package name is actually `mbed-greentea`.
# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("mbed-greentea").version
<commit_before># # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("greentea").version <commit_msg>htrun: Fix package name passed to setuptools We passed the package name `greentea` to setuptools to discover the current package version. This failed because the package name is actually `mbed-greentea`.<commit_after>
# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("mbed-greentea").version
# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("greentea").version htrun: Fix package name passed to setuptools We passed the package name `greentea` to setuptools to discover the current package version. This failed because the package name is actually `mbed-greentea`.# # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("mbed-greentea").version
<commit_before># # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("greentea").version <commit_msg>htrun: Fix package name passed to setuptools We passed the package name `greentea` to setuptools to discover the current package version. This failed because the package name is actually `mbed-greentea`.<commit_after># # Copyright (c) 2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """! @package greentea-host-test-runner This package contains basic host test implementation with algorithms to flash and reset device. Functionality can be overridden by set of plugins which can provide specialised flashing and reset implementations. """ from pkg_resources import get_distribution __version__ = get_distribution("mbed-greentea").version
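The htrun commit reads the installed distribution's version with pkg_resources. On Python 3.8+ the same lookup is available without setuptools; a hedged sketch of the equivalent, assuming the distribution is installed as mbed-greentea (the fallback value is illustrative):

from importlib.metadata import version, PackageNotFoundError

try:
    __version__ = version("mbed-greentea")
except PackageNotFoundError:
    # E.g. when running from a source checkout that was never installed.
    __version__ = "0.0.0"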
5d6fcf06c4cef18507c5b9882398a673e45e0734
scikits/image/__init__.py
scikits/image/__init__.py
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import *
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import gzip import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import *
Work around `AttributeError: 'module' object has no attribute 'BufferedIOBase'` on Python 2.7+, Windows
Work around `AttributeError: 'module' object has no attribute 'BufferedIOBase'` on Python 2.7+, Windows
Python
bsd-3-clause
warmspringwinds/scikit-image,SamHames/scikit-image,almarklein/scikit-image,jwiggins/scikit-image,blink1073/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,ajaybhat/scikit-image,vighneshbirodkar/scikit-image,emon10005/scikit-image,chintak/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,ajaybhat/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,michaelaye/scikit-image,almarklein/scikit-image,emmanuelle/scikits.image,paalge/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,michaelpacer/scikit-image,newville/scikit-image,emmanuelle/scikits.image,bennlich/scikit-image,juliusbierk/scikit-image,pratapvardhan/scikit-image,bennlich/scikit-image,michaelaye/scikit-image,rjeli/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,Britefury/scikit-image,paalge/scikit-image,dpshelio/scikit-image,newville/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,emmanuelle/scikits.image,emon10005/scikit-image,ofgulban/scikit-image,keflavich/scikit-image,chintak/scikit-image,robintw/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,robintw/scikit-image,chriscrosscutler/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,Britefury/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,oew1v07/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,WarrenWeckesser/scikits-image,GaZ3ll3/scikit-image,SamHames/scikit-image,blink1073/scikit-image,Midafi/scikit-image,keflavich/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,ofgulban/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,Midafi/scikit-image,youprofit/scikit-image,emmanuelle/scikits.image
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import * Work around `AttributeError: 'module' object has no attribute 'BufferedIOBase'` on Python 2.7+, Windows
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import gzip import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import *
<commit_before>"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import * <commit_msg>Work around `AttributeError: 'module' object has no attribute 'BufferedIOBase'` on Python 2.7+, Windows<commit_after>
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import gzip import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import *
"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import * Work around `AttributeError: 'module' object has no attribute 'BufferedIOBase'` on Python 2.7+, Windows"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import gzip import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import *
<commit_before>"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import * <commit_msg>Work around `AttributeError: 'module' object has no attribute 'BufferedIOBase'` on Python 2.7+, Windows<commit_after>"""Image Processing SciKit (Toolbox for SciPy)""" import os.path as _osp data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data')) from version import version as __version__ def _setup_test(): import gzip import functools basedir = _osp.dirname(_osp.join(__file__, '../')) args = ['', '--exe', '-w', '%s' % basedir] try: import nose as _nose except ImportError: print("Could not load nose. Unit tests not available.") return None else: return functools.partial(_nose.run, 'scikits.image', argv=args) test = _setup_test() if test is None: del test def get_log(name): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ import logging, sys logging.basicConfig(stream=sys.stdout, level=logging.WARNING) return logging.getLogger(name) from util.dtype import *
fa8cfbc631dfab0067b8c15bf6374579af071e7a
tests/test_main.py
tests/test_main.py
import sys import unittest import tempfile import pathlib import os import os.path from unittest.mock import patch import monitor class TestMonitor(unittest.TestCase): def test_MonitorConfigInterval(self): with self.assertRaises(SystemExit): testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"] with patch.object(sys, "argv", testargs): monitor.main() with self.assertRaises(SystemExit): testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"] with patch.object(sys, "argv", testargs): monitor.main() def test_file_hup(self): temp_file_info = tempfile.mkstemp() os.close(temp_file_info[0]) temp_file_name = temp_file_info[1] monitor.check_hup_file(temp_file_name) pathlib.Path(temp_file_name).touch() self.assertEqual( monitor.check_hup_file(temp_file_name), True, "check_hup_file did not trigger", ) self.assertEqual( monitor.check_hup_file(temp_file_name), False, "check_hup_file should not have triggered", ) os.unlink(temp_file_name)
import sys import unittest import tempfile import pathlib import os import os.path import time from unittest.mock import patch import monitor class TestMonitor(unittest.TestCase): def test_MonitorConfigInterval(self): with self.assertRaises(SystemExit): testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"] with patch.object(sys, "argv", testargs): monitor.main() with self.assertRaises(SystemExit): testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"] with patch.object(sys, "argv", testargs): monitor.main() def test_file_hup(self): temp_file_info = tempfile.mkstemp() os.close(temp_file_info[0]) temp_file_name = temp_file_info[1] monitor.check_hup_file(temp_file_name) time.sleep(2) pathlib.Path(temp_file_name).touch() self.assertEqual( monitor.check_hup_file(temp_file_name), True, "check_hup_file did not trigger", ) self.assertEqual( monitor.check_hup_file(temp_file_name), False, "check_hup_file should not have triggered", ) os.unlink(temp_file_name)
Add sleep during tests to prevent race
Add sleep during tests to prevent race
Python
bsd-3-clause
jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)

Add sleep during tests to prevent race
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        time.sleep(2)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)
<commit_before>import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)
<commit_msg>Add sleep during tests to prevent race<commit_after>
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        time.sleep(2)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)

Add sleep during tests to prevent race

import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        time.sleep(2)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)
<commit_before>import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)
<commit_msg>Add sleep during tests to prevent race<commit_after>import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch

import monitor


class TestMonitor(unittest.TestCase):
    def test_MonitorConfigInterval(self):
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()
        with self.assertRaises(SystemExit):
            testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
            with patch.object(sys, "argv", testargs):
                monitor.main()

    def test_file_hup(self):
        temp_file_info = tempfile.mkstemp()
        os.close(temp_file_info[0])
        temp_file_name = temp_file_info[1]
        monitor.check_hup_file(temp_file_name)
        time.sleep(2)
        pathlib.Path(temp_file_name).touch()
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            True,
            "check_hup_file did not trigger",
        )
        self.assertEqual(
            monitor.check_hup_file(temp_file_name),
            False,
            "check_hup_file should not have triggered",
        )
        os.unlink(temp_file_name)
95474b52fd81b8363809fe915bd38d00335424a9
thinglang/execution/builtins.py
thinglang/execution/builtins.py
class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def get_line(self):
        line = input()
        self.data.append(line)
        return line
class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self, heap):
        self.data = []
        self.heap = heap

    def get_line(self, line=None):
        if line is not None:
            self.heap['Output'].write(line)
        line = input()
        self.data.append(line)
        return line
Update Input object to support direct output during get_line operations
Update Input object to support direct output during get_line operations
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def get_line(self):
        line = input()
        self.data.append(line)
        return line

Update Input object to support direct output during get_line operations
class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self, heap):
        self.data = []
        self.heap = heap

    def get_line(self, line=None):
        if line is not None:
            self.heap['Output'].write(line)
        line = input()
        self.data.append(line)
        return line
<commit_before>class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def get_line(self):
        line = input()
        self.data.append(line)
        return line
<commit_msg>Update Input object to support direct output during get_line operations<commit_after>
class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self, heap):
        self.data = []
        self.heap = heap

    def get_line(self, line=None):
        if line is not None:
            self.heap['Output'].write(line)
        line = input()
        self.data.append(line)
        return line
class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def get_line(self):
        line = input()
        self.data.append(line)
        return line

Update Input object to support direct output during get_line operations

class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self, heap):
        self.data = []
        self.heap = heap

    def get_line(self, line=None):
        if line is not None:
            self.heap['Output'].write(line)
        line = input()
        self.data.append(line)
        return line
<commit_before>class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def get_line(self):
        line = input()
        self.data.append(line)
        return line
<commit_msg>Update Input object to support direct output during get_line operations<commit_after>class ThingObjectBase(object):
    def __getitem__(self, item):
        return getattr(self, item)

    def __contains__(self, item):
        return hasattr(self, item)


class ThingObjectOutput(ThingObjectBase):
    def __init__(self):
        self.data = []

    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))


class ThingObjectInput(ThingObjectBase):
    def __init__(self, heap):
        self.data = []
        self.heap = heap

    def get_line(self, line=None):
        if line is not None:
            self.heap['Output'].write(line)
        line = input()
        self.data.append(line)
        return line
9309efcbd84a0d78c162c2ee595c7e98fcdf68f9
django/contrib/comments/feeds.py
django/contrib/comments/feeds.py
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
Use correct m2m join table name in LatestCommentsFeed

git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Python
bsd-3-clause
svn2github/django,svn2github/django,svn2github/django
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date

Use correct m2m join table name in LatestCommentsFeed

git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
<commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed

git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date

Use correct m2m join table name in LatestCommentsFeed

git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37

from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
<commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed

git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments


class LatestCommentFeed(Feed):
    """Feed of latest comments on the current site."""

    def title(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"%s comments" % self._site.name

    def link(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return "http://%s/" % (self._site.domain)

    def description(self):
        if not hasattr(self, '_site'):
            self._site = Site.objects.get_current()
        return u"Latest comments on %s" % self._site.name

    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
        if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
            where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
            params = [settings.COMMENTS_BANNED_USERS_GROUP]
            qs = qs.extra(where=where, params=params)
        return qs.order_by('-submit_date')[:40]

    def item_pubdate(self, item):
        return item.submit_date
bdf35f5d45bd701bc720cd7bed6db5d7b311e713
pyxform/tests_v1/test_background_audio.py
pyxform/tests_v1/test_background_audio.py
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase


class AuditTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase

import unittest


class BackgroundAudioTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

    @unittest.skip("Required update to Validate to work")
    def test_background_audio_is_valid(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            run_odk_validate=True,
        )
Add ignored test for recordaction validation
Add ignored test for recordaction validation
Python
bsd-2-clause
XLSForm/pyxform,XLSForm/pyxform
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase


class AuditTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

Add ignored test for recordaction validation
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase

import unittest


class BackgroundAudioTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

    @unittest.skip("Required update to Validate to work")
    def test_background_audio_is_valid(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            run_odk_validate=True,
        )
<commit_before># -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase


class AuditTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )
<commit_msg>Add ignored test for recordaction validation<commit_after>
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase

import unittest


class BackgroundAudioTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

    @unittest.skip("Required update to Validate to work")
    def test_background_audio_is_valid(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            run_odk_validate=True,
        )
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase


class AuditTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

Add ignored test for recordaction validation

# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase

import unittest


class BackgroundAudioTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

    @unittest.skip("Required update to Validate to work")
    def test_background_audio_is_valid(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            run_odk_validate=True,
        )
<commit_before># -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase


class AuditTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )
<commit_msg>Add ignored test for recordaction validation<commit_after># -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase

import unittest


class BackgroundAudioTest(PyxformTestCase):
    def test_background_audio(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            xml__contains=[
                '<odk:recordaudio event="odk-instance-load" ref="/data/my_recording"/>',
            ],
        )

    @unittest.skip("Required update to Validate to work")
    def test_background_audio_is_valid(self):
        self.assertPyxformXform(
            name="data",
            md="""
            | survey |                  |              |
            |        | type             | name         |
            |        | background-audio | my_recording |
            """,
            run_odk_validate=True,
        )
9ef4d62362ab38623499d7f00bab1b05c9e016c0
user_deletion/notifications.py
user_deletion/notifications.py
from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def send_emails(notification):
    messages = []
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        messages.append([
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ])
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def build_emails(notification):
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        yield [
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ]


def send_emails(notification):
    messages = build_emails(notification)
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
Use a generator instead of append
Use a generator instead of append
Python
bsd-2-clause
incuna/django-user-deletion
from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def send_emails(notification):
    messages = []
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        messages.append([
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ])
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')

Use a generator instead of append
from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def build_emails(notification):
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        yield [
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ]


def send_emails(notification):
    messages = build_emails(notification)
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
<commit_before>from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def send_emails(notification):
    messages = []
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        messages.append([
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ])
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
<commit_msg>Use a generator instead of append<commit_after>
from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def build_emails(notification):
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        yield [
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ]


def send_emails(notification):
    messages = build_emails(notification)
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def send_emails(notification):
    messages = []
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        messages.append([
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ])
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')

Use a generator instead of append

from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def build_emails(notification):
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        yield [
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ]


def send_emails(notification):
    messages = build_emails(notification)
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
<commit_before>from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def send_emails(notification):
    messages = []
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        messages.append([
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ])
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
<commit_msg>Use a generator instead of append<commit_after>from django.conf import settings
from django.core.mail import send_mass_mail
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _

from pigeon.notification import Notification


def build_emails(notification):
    context = {'site': notification.site}
    for user in notification.users:
        message = render_to_string(notification.template_name, context)
        yield [
            notification.subject,
            message,
            settings.DEFAULT_FROM_EMAIL,
            [user.email],
        ]


def send_emails(notification):
    messages = build_emails(notification)
    send_mass_mail(messages)


class AccountInactiveNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_notification.txt'
    subject = _('Re-activate your account')


class AccountDeletedNotification(Notification):
    handlers = (send_emails,)
    template_name = 'user_deletion/email_deletion.txt'
    subject = _('Your account has been deleted')
a8d29f72070b643d08f51bc13d1f3ebe6cf68e7e
src/pip/__pip-runner__.py
src/pip/__pip-runner__.py
"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True)
"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec # TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True)
Change something so tests are not skipped
Change something so tests are not skipped
Python
mit
pfmoore/pip,pypa/pip,pypa/pip,pfmoore/pip,sbidoul/pip,pradyunsg/pip,sbidoul/pip,pradyunsg/pip
"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True) Change something so tests are not skipped
"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec # TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True)
<commit_before>"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True) <commit_msg>Change something so tests are not skipped<commit_after>
"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec # TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True)
"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True) Change something so tests are not skipped"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec # TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True)
<commit_before>"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True) <commit_msg>Change something so tests are not skipped<commit_after>"""Execute exactly this copy of pip, within a different environment. This file is named as it is, to ensure that this module can't be imported via an import statement. """ import runpy import sys import types from importlib.machinery import ModuleSpec, PathFinder from os.path import dirname from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod def find_spec( self, fullname: str, path: Optional[Sequence[Union[bytes, str]]] = None, target: Optional[types.ModuleType] = None, ) -> Optional[ModuleSpec]: if fullname != "pip": return None spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) assert spec, (PIP_SOURCES_ROOT, fullname) return spec # TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" runpy.run_module("pip", run_name="__main__", alter_sys=True)
7d8d5516a279cf1349af703f9051bb1acf084eaa
tests/test_browser_test_case.py
tests/test_browser_test_case.py
from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class BrowserTestCaseTest(TestCase):

    class SubClassed(BrowserTestCase):
        def do_nothing(self):
            pass

    def test_start_browser_when_given_unsupported_driver(self):
        bc = self.SubClassed("do_nothing")
        with self.assertRaises(ValueError):
            bc.start_browser(driver="NoReal")
        self.assertEqual(bc._browsers, [])

    def test_browser_is_cleaned_up_afterwards(self):
        bc = self.SubClassed("do_nothing")
        bc.start_browser("Firefox")
        bc.doCleanups()
        with self.assertRaises(Exception):
            bc.title
from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class SubClassed(BrowserTestCase):
    def do_nothing(self):
        pass


class BrowserTestCaseTest(TestCase):

    def test_browser_returns_last_browser_started(self):
        btc = SubClassed('do_nothing')
        btc.browsers.append('b1')
        btc.browsers.append('b2')
        btc.browsers.append('b3')
        self.assertEqual(btc.browser, 'b3')
Modify tests for the BrowserTestCase class so they don't hang
Modify tests for the BrowserTestCase class so they don't hang
Python
mit
aychedee/keteparaha,tomdottom/keteparaha
from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class BrowserTestCaseTest(TestCase):

    class SubClassed(BrowserTestCase):
        def do_nothing(self):
            pass

    def test_start_browser_when_given_unsupported_driver(self):
        bc = self.SubClassed("do_nothing")
        with self.assertRaises(ValueError):
            bc.start_browser(driver="NoReal")
        self.assertEqual(bc._browsers, [])

    def test_browser_is_cleaned_up_afterwards(self):
        bc = self.SubClassed("do_nothing")
        bc.start_browser("Firefox")
        bc.doCleanups()
        with self.assertRaises(Exception):
            bc.title

Modify tests for the BrowserTestCase class so they don't hang
from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class SubClassed(BrowserTestCase):
    def do_nothing(self):
        pass


class BrowserTestCaseTest(TestCase):

    def test_browser_returns_last_browser_started(self):
        btc = SubClassed('do_nothing')
        btc.browsers.append('b1')
        btc.browsers.append('b2')
        btc.browsers.append('b3')
        self.assertEqual(btc.browser, 'b3')
<commit_before>from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class BrowserTestCaseTest(TestCase):

    class SubClassed(BrowserTestCase):
        def do_nothing(self):
            pass

    def test_start_browser_when_given_unsupported_driver(self):
        bc = self.SubClassed("do_nothing")
        with self.assertRaises(ValueError):
            bc.start_browser(driver="NoReal")
        self.assertEqual(bc._browsers, [])

    def test_browser_is_cleaned_up_afterwards(self):
        bc = self.SubClassed("do_nothing")
        bc.start_browser("Firefox")
        bc.doCleanups()
        with self.assertRaises(Exception):
            bc.title
<commit_msg>Modify tests for the BrowserTestCase class so they don't hang<commit_after>
from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class SubClassed(BrowserTestCase):
    def do_nothing(self):
        pass


class BrowserTestCaseTest(TestCase):

    def test_browser_returns_last_browser_started(self):
        btc = SubClassed('do_nothing')
        btc.browsers.append('b1')
        btc.browsers.append('b2')
        btc.browsers.append('b3')
        self.assertEqual(btc.browser, 'b3')
from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class BrowserTestCaseTest(TestCase):

    class SubClassed(BrowserTestCase):
        def do_nothing(self):
            pass

    def test_start_browser_when_given_unsupported_driver(self):
        bc = self.SubClassed("do_nothing")
        with self.assertRaises(ValueError):
            bc.start_browser(driver="NoReal")
        self.assertEqual(bc._browsers, [])

    def test_browser_is_cleaned_up_afterwards(self):
        bc = self.SubClassed("do_nothing")
        bc.start_browser("Firefox")
        bc.doCleanups()
        with self.assertRaises(Exception):
            bc.title

Modify tests for the BrowserTestCase class so they don't hang

from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class SubClassed(BrowserTestCase):
    def do_nothing(self):
        pass


class BrowserTestCaseTest(TestCase):

    def test_browser_returns_last_browser_started(self):
        btc = SubClassed('do_nothing')
        btc.browsers.append('b1')
        btc.browsers.append('b2')
        btc.browsers.append('b3')
        self.assertEqual(btc.browser, 'b3')
<commit_before>from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class BrowserTestCaseTest(TestCase):

    class SubClassed(BrowserTestCase):
        def do_nothing(self):
            pass

    def test_start_browser_when_given_unsupported_driver(self):
        bc = self.SubClassed("do_nothing")
        with self.assertRaises(ValueError):
            bc.start_browser(driver="NoReal")
        self.assertEqual(bc._browsers, [])

    def test_browser_is_cleaned_up_afterwards(self):
        bc = self.SubClassed("do_nothing")
        bc.start_browser("Firefox")
        bc.doCleanups()
        with self.assertRaises(Exception):
            bc.title
<commit_msg>Modify tests for the BrowserTestCase class so they don't hang<commit_after>from unittest import TestCase

from keteparaha.browser import BrowserTestCase


class SubClassed(BrowserTestCase):
    def do_nothing(self):
        pass


class BrowserTestCaseTest(TestCase):

    def test_browser_returns_last_browser_started(self):
        btc = SubClassed('do_nothing')
        btc.browsers.append('b1')
        btc.browsers.append('b2')
        btc.browsers.append('b3')
        self.assertEqual(btc.browser, 'b3')
9510a0da5a6fee780e16db8f128f7c24bdb579d4
tests/test_post_import_hooks.py
tests/test_post_import_hooks.py
from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('socket')
        def hook_socket(module):
            self.assertEqual(module.__name__, 'socket')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import socket

        self.assertEqual(len(invoked), 1)


if __name__ == '__main__':
    unittest.main()
from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('this')
        def hook_this(module):
            self.assertEqual(module.__name__, 'this')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import this

        self.assertEqual(len(invoked), 1)


if __name__ == '__main__':
    unittest.main()
Adjust test to use different module as socket imported by coverage tools.
Adjust test to use different module as socket imported by coverage tools.
Python
bsd-2-clause
linglaiyao1314/wrapt,pombredanne/python-lazy-object-proxy,linglaiyao1314/wrapt,pombredanne/wrapt,akash1808/wrapt,pombredanne/wrapt,github4ry/wrapt,wujuguang/wrapt,pombredanne/python-lazy-object-proxy,akash1808/wrapt,ionelmc/python-lazy-object-proxy,ionelmc/python-lazy-object-proxy,github4ry/wrapt,GrahamDumpleton/wrapt,GrahamDumpleton/wrapt,wujuguang/wrapt
from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('socket')
        def hook_socket(module):
            self.assertEqual(module.__name__, 'socket')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import socket

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()

Adjust test to use different module as socket imported by coverage tools.

from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('this')
        def hook_this(module):
            self.assertEqual(module.__name__, 'this')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import this

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()

<commit_before>from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('socket')
        def hook_socket(module):
            self.assertEqual(module.__name__, 'socket')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import socket

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()
<commit_msg>Adjust test to use different module as socket imported by coverage tools.<commit_after>

from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('this')
        def hook_this(module):
            self.assertEqual(module.__name__, 'this')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import this

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()

from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('socket')
        def hook_socket(module):
            self.assertEqual(module.__name__, 'socket')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import socket

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()

Adjust test to use different module as socket imported by coverage tools.

from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('this')
        def hook_this(module):
            self.assertEqual(module.__name__, 'this')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import this

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()

<commit_before>from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('socket')
        def hook_socket(module):
            self.assertEqual(module.__name__, 'socket')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import socket

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()
<commit_msg>Adjust test to use different module as socket imported by coverage tools.<commit_after>
from __future__ import print_function

import unittest

import wrapt


class TestPostImportHooks(unittest.TestCase):

    def test_simple(self):
        invoked = []

        @wrapt.when_imported('this')
        def hook_this(module):
            self.assertEqual(module.__name__, 'this')
            invoked.append(1)

        self.assertEqual(len(invoked), 0)

        import this

        self.assertEqual(len(invoked), 1)

if __name__ == '__main__':
    unittest.main()
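Editor's note: the swap from socket to this works because a post-import hook fires on first import, and coverage tooling has typically imported socket before the test body runs, so the hook triggered too early. A hedged sketch (not from the wrapt repository) that makes the precondition explicit:

import sys
import unittest

import wrapt


class TestHookFiresOnFirstImport(unittest.TestCase):

    def test_module_not_preimported(self):
        # 'this' is a safe target; 'socket' would already be in sys.modules
        # under coverage tools, so the hook would fire at registration time.
        self.assertNotIn('this', sys.modules)

        seen = []

        @wrapt.when_imported('this')
        def hook(module):
            seen.append(module.__name__)

        import this  # noqa: F401 (prints the Zen of Python as a side effect)

        self.assertEqual(seen, ['this'])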
f84ee1a186157320668cf09df2449eba10e8a2fe
clean_analysis.py
clean_analysis.py
# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')

# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *plot.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')
Save cal images from clean-analysis
Save cal images from clean-analysis
Python
apache-2.0
jd-au/magmo-HI,jd-au/magmo-HI
# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')

Save cal images from clean-analysis

# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *plot.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')

<commit_before># Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')
<commit_msg>Save cal images from clean-analysis<commit_after>

# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *plot.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')

# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')

Save cal images from clean-analysis

# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *plot.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')

<commit_before># Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')
<commit_msg>Save cal images from clean-analysis<commit_after>
# Remove all analysis output files.
#
# Author James Dempsey
# Date 6 Aug 2016

import sys
import os
import shutil

# ### Script starts here ###

# Read day parameter
if len(sys.argv) != 2:
    print("Incorrect number of parameters.")
    print("Usage: python clean_analysis.py day")
    exit(1)
day = sys.argv[1]
dayDirName = "day" + day

# Delete the generated files
print "Removing analysis files"
os.chdir(dayDirName)
os.system("rm *plot.png")
os.system("rm *.vot")
os.system("rm *.xml")
os.system("rm *.pdf")
os.chdir('..')
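Editor's note: the fix narrows the shell glob from *.png to *plot.png so calibration images survive the cleanup. A hedged, shell-free sketch of the same deletion step (a hypothetical refactor, not part of the commit) that avoids os.system entirely:

import glob
import os

# Only *plot.png is removed; cal images and any other .png files are kept.
for pattern in ("*plot.png", "*.vot", "*.xml", "*.pdf"):
    for path in glob.glob(pattern):
        os.remove(path)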
101a7c8e0e26089d6d1deb4e7728e4eb59274b74
app/main/forms.py
app/main/forms.py
from flask.ext.wtf import Form
from wtforms import validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])

from flask.ext.wtf import Form
from wtforms import RadioField, validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])


class InviteAdminForm(Form):
    role_choices = [
        ('admin-ccs-category', 'Category'),
        ('admin-ccs-sourcing', 'Sourcing'),
        ('admin', 'Support'),
    ]

    email_address = StripWhitespaceStringField(
        'Email address',
        validators=[
            validators.DataRequired(message='You must provide an email address'),
            validators.Email(message='Please enter a valid email address'),
            AdminEmailAddressValidator(message='The email address must belong to an approved domain')
        ]
    )
    role = RadioField(
        'Permissions',
        validators=[validators.InputRequired(message='You must choose a permission')],
        choices=role_choices
    )

    def __init__(self, *args, **kwargs):
        super(InviteAdminForm, self).__init__(*args, **kwargs)
        self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]
Add InviteAdminForm with email_address and role fields
Add InviteAdminForm with email_address and role fields
Python
mit
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
from flask.ext.wtf import Form
from wtforms import validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])

Add InviteAdminForm with email_address and role fields

from flask.ext.wtf import Form
from wtforms import RadioField, validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])


class InviteAdminForm(Form):
    role_choices = [
        ('admin-ccs-category', 'Category'),
        ('admin-ccs-sourcing', 'Sourcing'),
        ('admin', 'Support'),
    ]

    email_address = StripWhitespaceStringField(
        'Email address',
        validators=[
            validators.DataRequired(message='You must provide an email address'),
            validators.Email(message='Please enter a valid email address'),
            AdminEmailAddressValidator(message='The email address must belong to an approved domain')
        ]
    )
    role = RadioField(
        'Permissions',
        validators=[validators.InputRequired(message='You must choose a permission')],
        choices=role_choices
    )

    def __init__(self, *args, **kwargs):
        super(InviteAdminForm, self).__init__(*args, **kwargs)
        self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]

<commit_before>from flask.ext.wtf import Form
from wtforms import validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])
<commit_msg>Add InviteAdminForm with email_address and role fields<commit_after>

from flask.ext.wtf import Form
from wtforms import RadioField, validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])


class InviteAdminForm(Form):
    role_choices = [
        ('admin-ccs-category', 'Category'),
        ('admin-ccs-sourcing', 'Sourcing'),
        ('admin', 'Support'),
    ]

    email_address = StripWhitespaceStringField(
        'Email address',
        validators=[
            validators.DataRequired(message='You must provide an email address'),
            validators.Email(message='Please enter a valid email address'),
            AdminEmailAddressValidator(message='The email address must belong to an approved domain')
        ]
    )
    role = RadioField(
        'Permissions',
        validators=[validators.InputRequired(message='You must choose a permission')],
        choices=role_choices
    )

    def __init__(self, *args, **kwargs):
        super(InviteAdminForm, self).__init__(*args, **kwargs)
        self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]

from flask.ext.wtf import Form
from wtforms import validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])

Add InviteAdminForm with email_address and role fields

from flask.ext.wtf import Form
from wtforms import RadioField, validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])


class InviteAdminForm(Form):
    role_choices = [
        ('admin-ccs-category', 'Category'),
        ('admin-ccs-sourcing', 'Sourcing'),
        ('admin', 'Support'),
    ]

    email_address = StripWhitespaceStringField(
        'Email address',
        validators=[
            validators.DataRequired(message='You must provide an email address'),
            validators.Email(message='Please enter a valid email address'),
            AdminEmailAddressValidator(message='The email address must belong to an approved domain')
        ]
    )
    role = RadioField(
        'Permissions',
        validators=[validators.InputRequired(message='You must choose a permission')],
        choices=role_choices
    )

    def __init__(self, *args, **kwargs):
        super(InviteAdminForm, self).__init__(*args, **kwargs)
        self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]

<commit_before>from flask.ext.wtf import Form
from wtforms import validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])
<commit_msg>Add InviteAdminForm with email_address and role fields<commit_after>
from flask.ext.wtf import Form
from wtforms import RadioField, validators

from dmutils.forms import StripWhitespaceStringField

from .. import data_api_client


class AdminEmailAddressValidator(object):
    def __init__(self, message=None):
        self.message = message

    def __call__(self, form, field):
        if not data_api_client.email_is_valid_for_admin_user(field.data):
            raise validators.StopValidation(self.message)


class EmailAddressForm(Form):
    email_address = StripWhitespaceStringField('Email address', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class MoveUserForm(Form):
    user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
        validators.DataRequired(message="Email can not be empty"),
        validators.Email(message="Please enter a valid email address")
    ])


class EmailDomainForm(Form):
    new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
        validators.DataRequired(message="The domain field can not be empty.")
    ])


class InviteAdminForm(Form):
    role_choices = [
        ('admin-ccs-category', 'Category'),
        ('admin-ccs-sourcing', 'Sourcing'),
        ('admin', 'Support'),
    ]

    email_address = StripWhitespaceStringField(
        'Email address',
        validators=[
            validators.DataRequired(message='You must provide an email address'),
            validators.Email(message='Please enter a valid email address'),
            AdminEmailAddressValidator(message='The email address must belong to an approved domain')
        ]
    )
    role = RadioField(
        'Permissions',
        validators=[validators.InputRequired(message='You must choose a permission')],
        choices=role_choices
    )

    def __init__(self, *args, **kwargs):
        super(InviteAdminForm, self).__init__(*args, **kwargs)
        self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]
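Editor's note: a hypothetical usage sketch for the new InviteAdminForm, not from the repository. It assumes CSRF is disabled via the flask-wtf csrf_enabled flag and stubs the module-level data_api_client so AdminEmailAddressValidator never hits the real API; the example address is invented.

from unittest import mock

from werkzeug.datastructures import MultiDict

with mock.patch('app.main.forms.data_api_client') as api:
    api.email_is_valid_for_admin_user.return_value = True

    form = InviteAdminForm(formdata=MultiDict(
        [('email_address', 'someone@example.gov.uk'), ('role', 'admin')]
    ), csrf_enabled=False)
    assert form.validate()

    # A role outside role_choices fails the RadioField's built-in check.
    bad = InviteAdminForm(formdata=MultiDict(
        [('email_address', 'someone@example.gov.uk'), ('role', 'superuser')]
    ), csrf_enabled=False)
    assert not bad.validate()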
31bbec5d55437d36b78ca1c36dee19e74203695b
setup.py
setup.py
#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow', 'h5py',
                       'PIL', 'absl-py'],
)

#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.training.models',
                'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow<2', 'h5py',
                       'Pillow', 'absl-py'],
)
Add ffn.training.models and switch to Pillow.
Add ffn.training.models and switch to Pillow.

It is still easier to use this repo via git clone rather than pip install, e.g. from Colab. But this seemed worth updating anyway.
Python
apache-2.0
google/ffn
#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow', 'h5py',
                       'PIL', 'absl-py'],
)

Add ffn.training.models and switch to Pillow.

It is still easier to use this repo via git clone rather than pip install, e.g. from Colab. But this seemed worth updating anyway.

#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.training.models',
                'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow<2', 'h5py',
                       'Pillow', 'absl-py'],
)

<commit_before>#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow', 'h5py',
                       'PIL', 'absl-py'],
)
<commit_msg>Add ffn.training.models and switch to Pillow. It is still easier to use this repo via git clone rather than pip install, e.g. from Colab. But this seemed worth updating anyway.<commit_after>

#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.training.models',
                'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow<2', 'h5py',
                       'Pillow', 'absl-py'],
)

#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow', 'h5py',
                       'PIL', 'absl-py'],
)

Add ffn.training.models and switch to Pillow.

It is still easier to use this repo via git clone rather than pip install, e.g. from Colab. But this seemed worth updating anyway.

#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.training.models',
                'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow<2', 'h5py',
                       'Pillow', 'absl-py'],
)

<commit_before>#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow', 'h5py',
                       'PIL', 'absl-py'],
)
<commit_msg>Add ffn.training.models and switch to Pillow. It is still easier to use this repo via git clone rather than pip install, e.g. from Colab. But this seemed worth updating anyway.<commit_after>
#!/usr/bin/env python

from distutils.core import setup

setup(
    name = 'ffn',
    version = '0.1.0',
    author = 'Michal Januszewski',
    author_email = 'mjanusz@google.com',
    packages = ['ffn', 'ffn.inference', 'ffn.training', 'ffn.training.models',
                'ffn.utils'],
    scripts = ['build_coordinates.py', 'compute_partitions.py',
               'run_inference.py', 'train.py'],
    url = 'https://github.com/google/ffn',
    license = 'LICENSE',
    description = 'Flood-Filling Networks for volumetric instance segmentation',
    long_description= open('README.md').read(),
    install_requires= ['scikit-image', 'scipy', 'numpy', 'tensorflow<2', 'h5py',
                       'Pillow', 'absl-py'],
)
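Editor's note: the commit had to remember to add ffn.training.models to the hand-maintained package list. A hedged alternative (an editorial suggestion, not the project's actual setup.py) that discovers subpackages automatically:

from setuptools import find_packages, setup

setup(
    name='ffn',
    version='0.1.0',
    # find_packages() picks up ffn.training.models and any future
    # subpackages, so the list cannot silently go stale.
    packages=find_packages(include=['ffn', 'ffn.*']),
    install_requires=['scikit-image', 'scipy', 'numpy', 'tensorflow<2',
                      'h5py', 'Pillow', 'absl-py'],
)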
2bd9dde6502c360fb7f6b37efeb458b4e4b703c1
setup.py
setup.py

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.5",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.6",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)
Increment version for new migrations..
Increment version for new migrations..
Python
mit
zehome/django-payzen,zehome/django-payzen

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.5",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)

Increment version for new migrations..

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.6",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)

<commit_before>"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.5",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)
<commit_msg>Increment version for new migrations..<commit_after>

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.6",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.5",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)

Increment version for new migrations..

"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.6",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)

<commit_before>"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.5",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)
<commit_msg>Increment version for new migrations..<commit_after>
"""Setup file for easy installation"""
import os

from setuptools import setup, find_packages

from tests import test_cmd

ROOT = os.path.dirname(__file__)

setup(
    name="django-payzen",
    version="1.0.6",
    description="Django app to manage payments with Payzen ETP",
    license='MIT',
    author="Bertrand Svetchine",
    author_email="bertrand.svetchine@gmail.com",
    url="https://github.com/bsvetchine/django-payzen",
    packages=find_packages(),
    include_package_data=True,
    install_requires=["Django"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Framework :: Django",
        "Topic :: Software Development"],
    cmdclass={'test': test_cmd.TestCommand}
)
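Editor's note: version bumps like 1.0.5 to 1.0.6 are easy to forget when they live only in setup.py. A common single-sourcing pattern (an editorial suggestion; the payzen/__init__.py path and __version__ attribute are assumptions about this repo's layout) reads the version from the package instead:

import os
import re


ROOT = os.path.dirname(__file__)


def read_version():
    # Parse __version__ without importing the package (avoids Django setup).
    with open(os.path.join(ROOT, 'payzen', '__init__.py')) as f:
        return re.search(
            r"__version__\s*=\s*['\"]([^'\"]+)['\"]", f.read()).group(1)

# then: setup(name="django-payzen", version=read_version(), ...)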