column            type            min-max length / classes
commit            stringlengths   40 - 40
old_file          stringlengths   4 - 118
new_file          stringlengths   4 - 118
old_contents      stringlengths   0 - 2.94k
new_contents      stringlengths   1 - 4.43k
subject           stringlengths   15 - 444
message           stringlengths   16 - 3.45k
lang              stringclasses   1 value
license           stringclasses   13 values
repos             stringlengths   5 - 43.2k
prompt            stringlengths   17 - 4.58k
response          stringlengths   1 - 4.43k
prompt_tagged     stringlengths   58 - 4.62k
response_tagged   stringlengths   1 - 4.43k
text              stringlengths   132 - 7.29k
text_tagged       stringlengths   173 - 7.33k
commit: 35a9e8f7ba101b9b36dc1ac3097e47b03e7cad89
old_file: setup.py
new_file: setup.py

old_contents:
import os
from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))

with open(os.path.join(here, 'README.rst')) as f:
    readme = f.read()

with open(os.path.join(here, 'requirements.txt')) as f:
    requires = filter(None, f.readlines())

with open(os.path.join(here, 'requirements-dev.txt')) as f:
    requires_dev = filter(None, f.readlines())

with open(os.path.join(here, 'VERSION')) as f:
    version = f.read().strip()

setup(name='molo.commenting',
      version=version,
      description=('Comments helpers for sites built with Molo.'),
      long_description=readme,
      classifiers=[
          "Programming Language :: Python",
          "Framework :: Django",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='Praekelt Foundation',
      author_email='dev@praekelt.com',
      url='http://github.com/praekelt/molo.commenting',
      license='BSD',
      keywords='praekelt, mobi, web, django',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      namespace_packages=['molo'],
      install_requires=requires,
      tests_require=requires_dev,
      entry_points={})

new_contents:
import os
from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))

with open(os.path.join(here, 'README.rst')) as f:
    readme = f.read()

with open(os.path.join(here, 'requirements.txt')) as f:
    requires = f.read().split('\n')

with open(os.path.join(here, 'requirements-dev.txt')) as f:
    requires_dev = f.read().split('\n')

with open(os.path.join(here, 'VERSION')) as f:
    version = f.read().strip()

setup(name='molo.commenting',
      version=version,
      description=('Comments helpers for sites built with Molo.'),
      long_description=readme,
      classifiers=[
          "Programming Language :: Python",
          "Framework :: Django",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='Praekelt Foundation',
      author_email='dev@praekelt.com',
      url='http://github.com/praekelt/molo.commenting',
      license='BSD',
      keywords='praekelt, mobi, web, django',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      namespace_packages=['molo'],
      install_requires=requires,
      tests_require=requires_dev,
      entry_points={})

subject: Use f.read() instead of filter() for reading requirements
message:
Use f.read() instead of filter() for reading requirements

filter() has changed in Python 3 which breaks this.

lang: Python
license: bsd-2-clause
repos: praekelt/molo.commenting,praekelt/molo.commenting
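A note on the commit above: in Python 3, filter() returns a lazy iterator rather than a list, which is the change the message refers to. The sketch below illustrates the difference and two possible fixes; the sample requirement strings are invented for illustration and are not taken from the repository.

```python
# Illustrative only; the requirement strings are made up.
lines = ['Django>=1.9\n', '\n', 'molo.core\n']

requires = filter(None, lines)
print(type(requires))  # Python 2: <type 'list'>; Python 3: <class 'filter'>

# The approach the commit takes: read the whole file and split on newlines,
# which always yields a real list (but may contain empty strings).
requires = 'Django>=1.9\n\nmolo.core\n'.split('\n')
print(requires)  # ['Django>=1.9', '', 'molo.core', '']

# An alternative would be to materialise the filter object explicitly.
requires = list(filter(None, lines))
print(requires)  # ['Django>=1.9\n', 'molo.core\n']
```

The empty strings left behind by split('\n') are exactly what the next record's commit cleans up.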
commit: 93644ba1850186eabcfea6bdab47e3e3d223becf
old_file: setup.py
new_file: setup.py

old_contents:
from setuptools import setup, find_packages

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('requirements.txt') as req_file:
    requires = req_file.read().split('\n')

with open('requirements-dev.txt') as req_file:
    requires_dev = req_file.read().split('\n')

with open('VERSION') as fp:
    version = fp.read().strip()

setup(name='molo.yourwords',
      version=version,
      description=('A Molo module that enables user generated content '
                   'competitions'),
      long_description=readme,
      classifiers=[
          "Programming Language :: Python :: 2.7",
          "Programming Language :: Python :: 3.6",
          "Framework :: Django",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='Praekelt Foundation',
      author_email='dev@praekelt.com',
      url='http://github.com/praekelt/molo.yourwords',
      license='BSD',
      keywords='praekelt, mobi, web, django',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      namespace_packages=['molo'],
      install_requires=requires,
      tests_require=requires_dev,
      entry_points={})

new_contents:
from setuptools import setup, find_packages

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('requirements.txt') as req_file:
    requires = [req for req in req_file.read().split('\n') if req]

with open('requirements-dev.txt') as req_file:
    requires_dev = [req for req in req_file.read().split('\n') if req]

with open('VERSION') as fp:
    version = fp.read().strip()

setup(name='molo.yourwords',
      version=version,
      description=('A Molo module that enables user generated content '
                   'competitions'),
      long_description=readme,
      classifiers=[
          "Programming Language :: Python :: 2.7",
          "Programming Language :: Python :: 3.6",
          "Framework :: Django",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='Praekelt Foundation',
      author_email='dev@praekelt.com',
      url='http://github.com/praekelt/molo.yourwords',
      license='BSD',
      keywords='praekelt, mobi, web, django',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      namespace_packages=['molo'],
      install_requires=requires,
      tests_require=requires_dev,
      entry_points={})

subject: Remove empty string from requirements list
message:
Remove empty string from requirements list

When we moved to Python 3 we used this simpler method to read the
requirements file. However we need to remove the empty/Falsey elements
from the list.

This fixes the error:
```
Failed building wheel for molo.yourwords
```

lang: Python
license: bsd-2-clause
repos: praekelt/molo.yourwords,praekelt/molo.yourwords
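As the message above explains, splitting the requirements file on '\n' leaves empty strings in the list, which the commit reports as breaking the wheel build. A short sketch of the before/after behaviour, using an invented requirements string:

```python
# Hypothetical requirements.txt content with a trailing newline.
raw = 'Django>=1.9\nmolo.core\n'

requires = raw.split('\n')
print(requires)  # ['Django>=1.9', 'molo.core', ''] - trailing empty entry

# The fix applied in the commit: keep only truthy entries.
requires = [req for req in raw.split('\n') if req]
print(requires)  # ['Django>=1.9', 'molo.core']

# splitlines() is an equivalent alternative that never yields a trailing ''.
print(raw.splitlines())  # ['Django>=1.9', 'molo.core']
```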
commit: 4e84dc31d52412a9d58d5f0c54f5514c0eac5137
old_file: console.py
new_file: console.py

old_contents:
from dumpster import Dumpster
import os

i = input('\r>')

if i == 'list':
    cwd = os.getcwd()
    lcd = os.listdir()
    dump = ''
    for file in lcd:
        if '.dmp' in file:
            dump+= ' '+file
    print(dump)

new_contents:
from dumpster import Dumpster
import os

running = True
selected = ''

while running:
    #cwd = os.getcwd()
    i = input('\r%s>'%(selected))

    if i == 'exit':
        running = False

    if i[0:6] == 'create':
        name = i[7:]
        Dumpster(name).write_to_dump()

    if i == 'list':
        if selected is 'none':
            #list currrent working directory
            dirs = ''
            lcd = os.listdir()
            for file in lcd:
                if '.dmp' in file:
                    dirs+= ' '+file.strip('.dmp')
            print(dirs)
        else:
            #list selected dump
            #.......................................

    if i[0:6] == 'select':
        name = i[7:]
        selected = name

subject: Select and Create and List
message: Select and Create and List
lang: Python
license: apache-2.0
repos: SirGuyOfGibson/source-dump
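Two details in the new console.py are worth flagging. `selected is 'none'` tests object identity against a string literal, and `selected` starts as `''` rather than `'none'`, so that branch is unlikely to behave as intended; an equality test against the sentinel is the usual idiom. Likewise, `file.strip('.dmp')` removes any of the characters `.`, `d`, `m`, `p` from both ends of the name rather than stripping the extension. The sketch below is a hypothetical rework of the same loop using equality tests and suffix slicing; the Dumpster class is omitted, so `create` only prints what it would do.

```python
import os

running = True
selected = ''  # empty string means no dump is selected

while running:
    i = input('\r%s>' % (selected,))

    if i == 'exit':
        running = False
    elif i.startswith('create '):
        name = i[len('create '):]
        print('would create dump %r here' % name)  # real tool: Dumpster(name).write_to_dump()
    elif i == 'list':
        if not selected:
            # List .dmp files in the current working directory, without the extension.
            dumps = [f[:-len('.dmp')] for f in os.listdir() if f.endswith('.dmp')]
            print(' '.join(dumps))
        else:
            print('listing a selected dump is not implemented yet')
    elif i.startswith('select '):
        selected = i[len('select '):]
```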
commit: 82cfbc71873b652d64e04b01c36b1cd9d06b2f44
old_file: setup.py
new_file: setup.py

old_contents:
from setuptools import setup

REPO_URL = 'http://github.com/datasciencebr/serenata-toolbox'

setup(
    author='Serenata de Amor',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.5',
        'Topic :: Utilities',
    ],
    description='Toolbox for Serenata de Amor project',
    zip_safe=False,
    install_requires=[
        'aiofiles',
        'aiohttp',
        'boto3',
        'beautifulsoup4>=4.4',
        'lxml>=3.6',
        'pandas>=0.18',
        'tqdm'
    ],
    keywords='serenata de amor, data science, brazil, corruption',
    license='MIT',
    long_description='Check `Serenata Toolbox at GitHub <{}>`_.'.format(REPO_URL),
    name='serenata-toolbox',
    packages=['serenata_toolbox.chamber_of_deputies', 'serenata_toolbox.datasets'],
    url=REPO_URL,
    version='9.0.4'
)

new_contents:
from setuptools import setup

REPO_URL = 'http://github.com/datasciencebr/serenata-toolbox'

setup(
    author='Serenata de Amor',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.5',
        'Topic :: Utilities',
    ],
    description='Toolbox for Serenata de Amor project',
    zip_safe=False,
    install_requires=[
        'aiofiles',
        'aiohttp',
        'boto3',
        'beautifulsoup4>=4.4',
        'lxml>=3.6',
        'pandas>=0.18',
        'tqdm'
    ],
    keywords='serenata de amor, data science, brazil, corruption',
    license='MIT',
    long_description='Check `Serenata Toolbox at GitHub <{}>`_.'.format(REPO_URL),
    name='serenata-toolbox',
    packages=['serenata_toolbox.chamber_of_deputies', 'serenata_toolbox.datasets'],
    url=REPO_URL,
    version='9.1.0'
)

subject: Fix the version to correct one
message: Fix the version to correct one
lang: Python
license: mit
repos: datasciencebr/serenata-toolbox
commit: 944746bd3e6b40b5ceb8ef974df6c26e550318cb
old_file: paradrop/tools/pdlog/pdlog/main.py
new_file: paradrop/tools/pdlog/pdlog/main.py

old_contents:
import sys
import argparse
import json
import urllib
import subprocess

LOG_FILE = "/var/snap/paradrop-daemon/common/logs/log"


def parseLine(line):
    try:
        data = json.loads(line)
        msg = urllib.unquote(data['message'])
        print(msg)
    except:
        pass


def runTail(logFile):
    cmd = ['tail', '-n', '100', '-f', LOG_FILE]
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            universal_newlines=True)
    for line in iter(proc.stdout.readline, ''):
        yield line
    proc.stdout.close()
    return_code = proc.wait()
    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd)


def main():
    p = argparse.ArgumentParser(description='Paradrop log tool')
    p.add_argument('-f',
                   help='Wait for additional data to be appended to the log file when end of file is reached',
                   action='store_true',
                   dest='f')
    args = p.parse_args()

    try:
        if args.f:
            for line in runTail(LOG_FILE):
                parseLine(line)
        else:
            with open(LOG_FILE, "r") as inputFile:
                for line in inputFile:
                    parseLine(line)
    except KeyboardInterrupt:
        sys.exit(0)


if __name__ == "__main__":
    main()

new_contents:
import sys
import argparse
import json
import urllib
import subprocess
from time import sleep

LOG_FILE = "/var/snap/paradrop-daemon/common/logs/log"


def parseLine(line):
    try:
        data = json.loads(line)
        msg = urllib.unquote(data['message'])
        print(msg)
    except:
        pass


def runTail(logFile):
    cmd = ['tail', '-n', '100', '-f', LOG_FILE]
    while (True):
        try:
            proc = subprocess.Popen(cmd, \
                                    stdout=subprocess.PIPE, \
                                    universal_newlines=True)
            for line in iter(proc.stdout.readline, ''):
                yield line
            proc.stdout.close()
            proc.wait()
        except subprocess.CalledProcessError:
            print 'Failed to open the log file, will try again...'
            sleep(2)
            continue


def main():
    p = argparse.ArgumentParser(description='Paradrop log tool')
    p.add_argument('-f',
                   help='Wait for additional data to be appended to the log file when end of file is reached',
                   action='store_true',
                   dest='f')
    args = p.parse_args()

    try:
        if args.f:
            for line in runTail(LOG_FILE):
                parseLine(line)
        else:
            with open(LOG_FILE, "r") as inputFile:
                for line in inputFile:
                    parseLine(line)
    except KeyboardInterrupt:
        sys.exit(0)


if __name__ == "__main__":
    main()

subject: Make sure the paradrop.pdlog retry when the log file does not exist
message: Make sure the paradrop.pdlog retry when the log file does not exist
lang: Python
license: apache-2.0
repos: ParadropLabs/Paradrop,ParadropLabs/Paradrop,ParadropLabs/Paradrop
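One observation on the new runTail(): the try block only calls Popen(), readline() and wait(), none of which raises subprocess.CalledProcessError on its own (the old version raised it explicitly), and `print 'Failed...'` is Python 2 statement syntax. Purely as an alternative sketch, not the project's actual fix, the retry could also be expressed by waiting for the log file to exist before starting tail; the timing values are illustrative.

```python
import os
import subprocess
from time import sleep

LOG_FILE = "/var/snap/paradrop-daemon/common/logs/log"


def run_tail(log_file):
    # Block until the log file appears, then follow it with tail.
    while not os.path.exists(log_file):
        print('Log file {} not found, will try again...'.format(log_file))
        sleep(2)

    cmd = ['tail', '-n', '100', '-f', log_file]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    try:
        for line in iter(proc.stdout.readline, ''):
            yield line
    finally:
        proc.stdout.close()
        proc.wait()
```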
commit: b4a179625dc852b97203eb4bc02fb1a812d5f9b1
old_file: setup.py
new_file: setup.py

old_contents:
# -*- coding: utf-8 -*-
import setuptools

setuptools.setup(
    name='pg_bawler',
    version='0.1.0',
    author='Michal Kuffa',
    author_email='michal.kuffa@gmail.com',
    description='Notify/listen python helpers for postgresql.',
    long_description=open('README.rst').read(),
    packages=setuptools.find_packages(),
    install_requires=[],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.5',
    ],
)

new_contents:
import setuptools

setuptools.setup(
    name='pg_bawler',
    version='0.1.0',
    author='Michal Kuffa',
    author_email='michal.kuffa@gmail.com',
    description='Notify/listen python helpers for postgresql.',
    long_description=open('README.rst').read(),
    packages=setuptools.find_packages(),
    install_requires=[],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.5',
    ],
)

subject: Remove utf pragma since we are py3 only
message: Remove utf pragma since we are py3 only
lang: Python
license: bsd-3-clause
repos: beezz/pg_bawler,beezz/pg_bawler
commit: 1e3e9839515b5769ffe71df67cbd1aa2c55f2e72
old_file: setup.py
new_file: setup.py

old_contents:
from setuptools import setup, find_packages
import cplcom

setup(
    name='CPLCom',
    version=cplcom.__version__,
    packages=find_packages(),
    package_data={'cplcom': ['../media/*', '/*.kv']},
    install_requires=['moa', 'kivy'],
    author='Matthew Einhorn',
    author_email='moiein2000@gmail.com',
    license='MIT',
    description=(
        'Project for common widgets used with Moa.')
)

new_contents:
from setuptools import setup, find_packages
import cplcom

setup(
    name='CPLCom',
    version=cplcom.__version__,
    packages=find_packages(),
    package_data={'cplcom': ['../media/*', './*.kv']},
    install_requires=['moa', 'kivy'],
    author='Matthew Einhorn',
    author_email='moiein2000@gmail.com',
    license='MIT',
    description=(
        'Project for common widgets used with Moa.')
)

subject: Include kv files in package.
message: Include kv files in package.
lang: Python
license: mit
repos: matham/cplcom
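The fix above changes the kv glob from '/*.kv' to './*.kv'. package_data patterns are evaluated relative to the package directory, and, roughly speaking, setuptools joins each pattern onto that directory before globbing, so a pattern that begins with '/' ends up pointing at the filesystem root instead of the cplcom package. A tiny illustration of the path-joining behaviour that makes the leading '/' problematic (output shown for POSIX paths):

```python
import os

# An absolute component discards everything joined before it.
print(os.path.join('cplcom', '/*.kv'))       # /*.kv
print(os.path.join('cplcom', './*.kv'))      # cplcom/./*.kv
print(os.path.join('cplcom', '../media/*'))  # cplcom/../media/*
```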
commit: 7db47e7b87305977d48be3f610004aed1626969a
old_file: setup.py
new_file: setup.py

old_contents:
#!/usr/bin/env python
from os.path import dirname, join
from distutils.core import setup

from colorama import VERSION

NAME = 'colorama'


def get_long_description(filename):
    readme = join(dirname(__file__), filename)
    return open(readme).read()


setup(
    name=NAME,
    version=VERSION,
    description='Cross-platform colored terminal text.',
    long_description=get_long_description('README.txt'),
    keywords='color colour terminal text ansi windows crossplatform xplatform',
    author='Jonathan Hartley',
    author_email='tartley@tartley.com',
    url='http://code.google.com/p/colorama/',
    license='BSD',
    packages=[NAME],
    # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.1',
        'Topic :: Terminals',
    ]
)

new_contents:
#!/usr/bin/env python
from os.path import dirname, join
from distutils.core import setup

from colorama import VERSION

NAME = 'colorama'


def get_long_description(filename):
    readme = join(dirname(__file__), filename)
    return open(readme).read()


setup(
    name=NAME,
    version=VERSION,
    description='Cross-platform colored terminal text.',
    long_description=get_long_description('README.txt'),
    keywords='color colour terminal text ansi windows crossplatform xplatform',
    author='Jonathan Hartley',
    author_email='tartley@tartley.com',
    url='http://code.google.com/p/colorama/',
    license='BSD',
    packages=[NAME],
    # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Topic :: Terminals',
    ]
)

subject: Change PyPI development status from pre-alpha to beta.
message: Change PyPI development status from pre-alpha to beta.
lang: Python
license: bsd-3-clause
repos: msabramo/colorama,msabramo/colorama
#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Topic :: Terminals', ] ) Change PyPI development status from pre-alpha to beta.
#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Terminals', ] )
<commit_before>#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Topic :: Terminals', ] ) <commit_msg>Change PyPI development status from pre-alpha to beta.<commit_after>
#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Terminals', ] )
#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Topic :: Terminals', ] ) Change PyPI development status from pre-alpha to beta.#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Terminals', ] )
<commit_before>#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Topic :: Terminals', ] ) <commit_msg>Change PyPI development status from pre-alpha to beta.<commit_after>#!/usr/bin/env python from os.path import dirname, join from distutils.core import setup from colorama import VERSION NAME = 'colorama' def get_long_description(filename): readme = join(dirname(__file__), filename) return open(readme).read() setup( name=NAME, version=VERSION, description='Cross-platform colored terminal text.', long_description=get_long_description('README.txt'), keywords='color colour terminal text ansi windows crossplatform xplatform', author='Jonathan Hartley', author_email='tartley@tartley.com', url='http://code.google.com/p/colorama/', license='BSD', packages=[NAME], # see classifiers http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Terminals', ] )
531e524d39b63730e06d171b21cc44b3e6ad2212
setup.py
setup.py
from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.0', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
Use the new version of traitlets.
MAINT: Use the new version of traitlets.
Python
apache-2.0
quantopian/serializable-traitlets
from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.0', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() MAINT: Use the new version of traitlets.
from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
<commit_before>from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.0', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() <commit_msg>MAINT: Use the new version of traitlets.<commit_after>
from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.0', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() MAINT: Use the new version of traitlets.from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
<commit_before>from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.0', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() <commit_msg>MAINT: Use the new version of traitlets.<commit_after>from setuptools import setup from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.0.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=[ 'straitlets', ], include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
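For context on the straitlets change above: install_requires() appends singledispatch only on Python < 3.4 by checking version_info when setup.py runs. A declarative alternative — shown here only as a hypothetical sketch, assuming a setuptools recent enough to understand PEP 508 environment markers, not as what the project does — moves that decision to the installer:

from setuptools import setup

setup(
    name='examplepkg',  # placeholder name
    version='0.1',
    py_modules=['examplepkg'],
    install_requires=[
        'traitlets>=4.1',
        'six>=1.9.0',
        'pyyaml>=3.11',
        # PEP 508 environment marker: only installed where the marker holds.
        'singledispatch>=3.4.0; python_version < "3.4"',
    ],
)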
94d5f8372d34c0d1416da3a6b39a91ec99de8752
setup.py
setup.py
import pypandoc from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=pypandoc.convert('README.md', 'rst'), author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], )
try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except: long_description = open('README.md').read() from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=long_description, author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], )
Fix error missing 'pypandoc' module when installing the module
Fix error missing 'pypandoc' module when installing the module
Python
mit
marcoacierno/django-schedulermanager
import pypandoc from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=pypandoc.convert('README.md', 'rst'), author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], ) Fix error missing 'pypandoc' module when installing the module
try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except: long_description = open('README.md').read() from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=long_description, author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], )
<commit_before>import pypandoc from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=pypandoc.convert('README.md', 'rst'), author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], ) <commit_msg>Fix error missing 'pypandoc' module when installing the module<commit_after>
try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except: long_description = open('README.md').read() from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=long_description, author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], )
import pypandoc from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=pypandoc.convert('README.md', 'rst'), author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], ) Fix error missing 'pypandoc' module when installing the moduletry: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except: long_description = open('README.md').read() from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=long_description, author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], )
<commit_before>import pypandoc from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=pypandoc.convert('README.md', 'rst'), author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], ) <commit_msg>Fix error missing 'pypandoc' module when installing the module<commit_after>try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except: long_description = open('README.md').read() from setuptools import setup, find_packages VERSION = '1.0' setup( name='django_schedulermanager', version=VERSION, description='A package that allows you to schedule and unschedule jobs', long_description=long_description, author='Marco Acierno', author_email='marcoaciernoemail@gmail.com', packages=find_packages(), install_requires=['django-rq', 'rq-scheduler'], url='https://github.com/marcoacierno/django-schedulermanager/', license='MIT', keywords=['django_schedulermanager', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.8', 'Framework :: Django', ], )
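For context on the fix above: wrapping the pypandoc import in try/except lets installation proceed on machines without pypandoc or pandoc by falling back to the raw Markdown. A slightly narrower version of the same fallback — a sketch, not the project's code — catches only the import failure and the OSError pypandoc raises when the pandoc binary itself is missing:

# Fallback sketch: use the converted README when pypandoc/pandoc are present,
# otherwise ship the raw Markdown as the long description.
try:
    import pypandoc
    long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
    with open('README.md') as f:
        long_description = f.read()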
31f4ef14895b9d69ac613fcc0b051f99be76c7b9
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', )
#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests[socks]', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', )
Install socks proxy required dependencies
feat: Install socks proxy required dependencies
Python
bsd-2-clause
kylef/goji
#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', ) feat: Install socks proxy required dependencies
#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests[socks]', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', )
<commit_before>#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', ) <commit_msg>feat: Install socks proxy required dependencies<commit_after>
#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests[socks]', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', )
#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', ) feat: Install socks proxy required dependencies#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests[socks]', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', )
<commit_before>#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', ) <commit_msg>feat: Install socks proxy required dependencies<commit_after>#!/usr/bin/env python from setuptools import setup setup( name='goji', version='0.2.1', url='https://github.com/kylef/goji', author='Kyle Fuller', author_email='kyle@fuller.li', packages=('goji',), install_requires=( 'requests', 'requests[socks]', 'requests-html', 'Click', 'click-datetime', 'six' ), entry_points={ 'console_scripts': ( 'goji = goji.commands:cli', ) }, test_suite='tests', )
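A note on the dependency added above: 'requests[socks]' uses the extras syntax — it installs requests together with whatever requests itself declares under its 'socks' extra (PySocks, which enables SOCKS proxy support), so the plain 'requests' entry alongside it is effectively redundant but harmless. A minimal sketch of depending on another package's extra (the name 'examplecli' is a placeholder):

from setuptools import setup

setup(
    name='examplecli',  # placeholder name
    version='0.1',
    py_modules=['examplecli'],
    install_requires=[
        # requests plus the optional dependencies it lists under its "socks"
        # extra; equivalent to installing requests and PySocks together.
        'requests[socks]',
    ],
)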
8307590d20f3a2bdb7efaa7679bfd37d83358475
setup.py
setup.py
#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4:
#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], data_files=[ ('', ['README.md', 'LICENSE']), ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4:
Include README.md and LICENSE in the package
Include README.md and LICENSE in the package
Python
mit
bastianh/evelink,Morloth1274/EVE-Online-POCO-manager,FashtimeDotCom/evelink,ayust/evelink,zigdon/evelink
#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4: Include README.md and LICENSE in the package
#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], data_files=[ ('', ['README.md', 'LICENSE']), ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4:
<commit_before>#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4: <commit_msg>Include README.md and LICENSE in the package<commit_after>
#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], data_files=[ ('', ['README.md', 'LICENSE']), ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4:
#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4: Include README.md and LICENSE in the package#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], data_files=[ ('', ['README.md', 'LICENSE']), ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4:
<commit_before>#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4: <commit_msg>Include README.md and LICENSE in the package<commit_after>#!/usr/bin/python import os from distutils.core import setup from evelink import __version__ __readme_path = os.path.join(os.path.dirname(__file__), "README.md") __readme_contents = open(__readme_path).read() setup( name="EVELink", version=__version__, description="Python Bindings for the EVE Online API", long_description=__readme_contents, license="MIT License", author="Valkyries of Night", author_email="d-eve-lopment@googlegroups.com", maintainer="Amber Yust", maintainer_email="amber.yust@gmail.com", url="https://github.com/eve-val/evelink", download_url="https://github.com/eve-val/evelink/downloads", packages=[ "evelink", "evelink.cache", "evelink.parsing", ], data_files=[ ('', ['README.md', 'LICENSE']), ], scripts=["bin/evelink"], provides=["evelink"], classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Games/Entertainment", "Topic :: Software Development :: Libraries :: Python Modules", ], ) # vim: set et ts=4 sts=4 sw=4:
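On the packaging mechanism used above: entries in data_files are (directory, files) pairs whose relative directories are interpreted against the installation prefix, so the empty string places README.md and LICENSE at the prefix root rather than inside the package. A hypothetical variant — a sketch, not a change to EVELink — keeps such files under a conventional docs location:

from setuptools import setup

setup(
    name='examplepkg',  # placeholder name
    version='0.1',
    packages=['examplepkg'],
    data_files=[
        # (install-prefix-relative directory, files copied there)
        ('share/doc/examplepkg', ['README.md', 'LICENSE']),
    ],
)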
87856b925d436df302eed4a65eac139ee394b427
setup.py
setup.py
#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/afnumpy/tarball/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], )
#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], )
Correct the pip download URL
Correct the pip download URL
Python
bsd-2-clause
FilipeMaia/afnumpy,daurer/afnumpy
#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/afnumpy/tarball/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], ) Correct the pip download URL
#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], )
<commit_before>#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/afnumpy/tarball/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], ) <commit_msg>Correct the pip download URL<commit_after>
#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], )
#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/afnumpy/tarball/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], ) Correct the pip download URL#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], )
<commit_before>#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/afnumpy/tarball/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], ) <commit_msg>Correct the pip download URL<commit_after>#!/usr/bin/env python """ setup.py file for afnumpy """ from distutils.core import setup from afnumpy import __version__ setup (name = 'afnumpy', version = __version__, author = "Filipe Maia", author_email = "filipe.c.maia@gmail.com", url = 'https://github.com/FilipeMaia/afnumpy', download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__, keywords = ['arrayfire', 'numpy', 'GPU'], description = """A GPU-ready drop-in replacement for numpy""", packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"], install_requires=['arrayfire', 'numpy'], )
cd0f144573349da80a179bbdd4a3d6e561980fb4
setup.py
setup.py
from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', 'nose', ], entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' )
from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', ], extras_require = { 'doc': ["sphinx"], 'test': ["nose"], }, entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' )
Make nose and sphinx optional dependencies.
Make nose and sphinx optional dependencies.
Python
mit
bpinsard/dcmstack
from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', 'nose', ], entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' ) Make nose and sphinx optional dependencies.
from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', ], extras_require = { 'doc': ["sphinx"], 'test': ["nose"], }, entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' )
<commit_before>from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', 'nose', ], entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' ) <commit_msg>Make nose and sphinx optional dependencies.<commit_after>
from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', ], extras_require = { 'doc': ["sphinx"], 'test': ["nose"], }, entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' )
from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', 'nose', ], entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' ) Make nose and sphinx optional dependencies.from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', ], extras_require = { 'doc': ["sphinx"], 'test': ["nose"], }, entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' )
<commit_before>from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', 'nose', ], entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' ) <commit_msg>Make nose and sphinx optional dependencies.<commit_after>from setuptools import setup, find_packages setup(name='dcmstack', description='Stack DICOM images into volumes', version='0.6.dev', author='Brendan Moloney', author_email='moloney@ohsu.edu', packages=find_packages('src'), package_dir = {'':'src'}, install_requires=['pydicom >= 0.9.7', 'nibabel', ], extras_require = { 'doc': ["sphinx"], 'test': ["nose"], }, entry_points = {'console_scripts' : \ ['dcmstack = dcmstack.dcmstack_cli:main', 'nitool = dcmstack.nitool_cli:main', ], }, test_suite = 'nose.collector' )
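The dcmstack row above moves nose and sphinx out of install_requires and into setuptools extras. A minimal usage sketch follows, assuming the extra names shown in the diff; the pip invocations are illustrative, not taken from the project's documentation.

# Base install, without the optional tools:
#   pip install dcmstack
# Opt in to an extras group declared under extras_require:
#   pip install "dcmstack[test]"   # pulls in nose
#   pip install "dcmstack[doc]"    # pulls in sphinx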
dfdb53248e76c9f12482141b9955f02b63e8744d
setup.py
setup.py
from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.5.2', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.6.5', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
Raise minimum requirement to force minimum TG version
Raise minimum requirement to force minimum TG version
Python
mit
TurboGears/tgext.admin,TurboGears/tgext.admin
from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.5.2', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) Raise minimum requirement to force minimum TG version
from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.6.5', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
<commit_before>from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.5.2', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) <commit_msg>Raise minimum requirement to force minimum TG version<commit_after>
from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.6.5', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.5.2', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) Raise minimum requirement to force minimum TG versionfrom setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.6.5', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
<commit_before>from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.5.2', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, ) <commit_msg>Raise minimum requirement to force minimum TG version<commit_after>from setuptools import setup, find_packages import os version = '0.5.4' here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'docs/HISTORY.txt')).read() except IOError: README = CHANGES = '' setup(name='tgext.admin', version=version, description="Admin Controller add-on for basic TG identity model.", long_description=README + "\n" + CHANGES, # Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords='turbogears2.extension, TG2, TG, sprox, Rest, internet, admin', author='Christopher Perkins', author_email='chris@percious.com', url='https://github.com/TurboGears/tgext.admin', license='MIT', packages=find_packages(exclude=['ez_setup']), namespace_packages=['tgext'], include_package_data=True, zip_safe=True, install_requires=[ 'setuptools', 'tgext.crud>=0.6.5', # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
05488bf9d43f3952d44686f1eccf2616a99805ab
setup.py
setup.py
import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), long_description_content_type='text/markdown', author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
Add long description content-type for PyPI
Add long description content-type for PyPI
Python
mit
twang817/rubyenv
import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], ) Add long description content-type for PyPI
import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), long_description_content_type='text/markdown', author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
<commit_before>import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], ) <commit_msg>Add long description content-type for PyPI<commit_after>
import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), long_description_content_type='text/markdown', author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], ) Add long description content-type for PyPIimport io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), long_description_content_type='text/markdown', author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
<commit_before>import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], ) <commit_msg>Add long description content-type for PyPI<commit_after>import io import os from setuptools import setup, find_packages version = io.open('rubyenv/_version.py').readlines()[-1].split()[-1].strip('"\'') setup( name='rubyenv', version=version, description='manage ruby in your virtualenv', long_description=io.open('README.md', encoding='utf-8').read(), long_description_content_type='text/markdown', author='Tommy Wang', author_email='twang@august8.net', url='http://github.com/twang817/rubyenv', download_url='https://github.com/twang817/rubyenv/tarball/{version}'.format(version=version), packages=find_packages(), install_requires=['gitpython'], include_package_data=True, entry_points={ 'console_scripts': ['rubyenv = rubyenv:main'], }, license='MIT', platforms=['any'], keywords='ruby virtualenv', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
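The rubyenv row above declares long_description_content_type so PyPI renders the Markdown README instead of treating it as reStructuredText. A hedged sketch of checking that metadata locally before upload; the build commands and file layout are assumptions.

# Illustrative pre-upload check of the rendered description:
#   python setup.py sdist bdist_wheel
#   twine check dist/*
# Accepted content types include 'text/markdown', 'text/x-rst' and 'text/plain'.
long_description_content_type = 'text/markdown'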
2884b8c7a2cfd4b18f58ce0807e46d1fad0aa4a9
setup.py
setup.py
# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ] install_requires=[ ], zip_safe=False, )
# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires=[ 'djangorestframework' ], zip_safe=False, )
Set djangorestframework as a install requirement
Set djangorestframework as a install requirement
Python
mit
adonescunha/django-todomvc
# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ] install_requires=[ ], zip_safe=False, ) Set djangorestframework as a install requirement
# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires=[ 'djangorestframework' ], zip_safe=False, )
<commit_before># -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ] install_requires=[ ], zip_safe=False, ) <commit_msg>Set djangorestframework as a install requirement<commit_after>
# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires=[ 'djangorestframework' ], zip_safe=False, )
# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ] install_requires=[ ], zip_safe=False, ) Set djangorestframework as a install requirement# -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires=[ 'djangorestframework' ], zip_safe=False, )
<commit_before># -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ] install_requires=[ ], zip_safe=False, ) <commit_msg>Set djangorestframework as a install requirement<commit_after># -*- coding: utf-8 -*8- from setuptools import setup, find_packages from todomvc import version setup( name='django-todomvc', version=version.to_str(), description='TodoMVC django app', author='Adones Cunha', author_email='adonescunha@gmail.com', url='https://github.com/adonescunha/django-todomvc', packages=find_packages(exclude=['tests']), package_data={ 'todomvc': [ ], }, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires=[ 'djangorestframework' ], zip_safe=False, )
e94175f194f134ad9f9a23514d28806037f9e728
setup.py
setup.py
#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.1', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )
#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.2', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )
Update version to 0.1 -> 0.2
Update version to 0.1 -> 0.2
Python
apache-2.0
gihankarunarathne/CurwMySQLAdapter
#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.1', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )Update version to 0.1 -> 0.2
#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.2', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )
<commit_before>#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.1', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )<commit_msg>Update version to 0.1 -> 0.2<commit_after>
#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.2', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )
#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.1', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )Update version to 0.1 -> 0.2#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.2', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )
<commit_before>#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.1', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )<commit_msg>Update version to 0.1 -> 0.2<commit_after>#!/usr/bin/env python import io from setuptools import setup, find_packages with open('./README.md') as f: readme = f.read() setup(name='curwmysqladapter', version='0.2', description='MySQL Adapter for storing Weather Timeseries', long_description=readme, url='http://github.com/gihankarunarathne/CurwMySQLAdapter', author='Gihan Karunarathne', author_email='gckarunarathne@gmail.com', license='Apache-2.0', packages=['curwmysqladapter'], install_requires=[ 'PyMySQL', ], test_suite='nose.collector', tests_require=[ 'nose', 'unittest2', ], zip_safe=False )
a5d1ad7b653bb266dab7d38ff7f65ac8c4cabaeb
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages # use requirements.txt for dependencies with open('requirements.txt') as f: required = map(lambda s: s.strip(), f.readlines()) with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, install_requires=required, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )
Add dependencies to installer via requirements.txt
Add dependencies to installer via requirements.txt
Python
mit
bear/ronkyuu,bear/ronkyuu
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )Add dependencies to installer via requirements.txt
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages # use requirements.txt for dependencies with open('requirements.txt') as f: required = map(lambda s: s.strip(), f.readlines()) with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, install_requires=required, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )<commit_msg>Add dependencies to installer via requirements.txt<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages # use requirements.txt for dependencies with open('requirements.txt') as f: required = map(lambda s: s.strip(), f.readlines()) with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, install_requires=required, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )Add dependencies to installer via requirements.txt#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages # use requirements.txt for dependencies with open('requirements.txt') as f: required = map(lambda s: s.strip(), f.readlines()) with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, install_requires=required, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )<commit_msg>Add dependencies to installer via requirements.txt<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages # use requirements.txt for dependencies with open('requirements.txt') as f: required = map(lambda s: s.strip(), f.readlines()) with open('README.md') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='ronkyuu', version='0.1.0', description='Webmention Manager', long_description=readme, install_requires=required, author='Mike Taylor', author_email='bear@bear.im', url='https://github.com/bear/ronkyuu', license=license, packages=find_packages(exclude=('tests', 'docs')) )
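The ronkyuu row above feeds requirements.txt into install_requires through map(). One point worth a sketch: on Python 3, map() yields a lazy iterator rather than a list, so a list comprehension is the more defensive spelling. This is an illustrative rewrite, not the project's actual code.

with open('requirements.txt') as f:
    # strip whitespace and drop blank lines; materialise a real list for setup()
    required = [line.strip() for line in f if line.strip()]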
4bcaff8e452973093a630d93086ea14636e97fc4
tests/conftest.py
tests/conftest.py
import pytest import tempfile import os import ConfigParser def getConfig(optionname,thedefault,section,configfile): """read an option from a config file or set a default send 'thedefault' as the data class you want to get a string back i.e. 'True' will return a string True will return a bool 1 will return an int """ #getConfig('something','adefaultvalue') retvalue=thedefault opttype=type(thedefault) if os.path.isfile(configfile): config = ConfigParser.ConfigParser() config.readfp(open(configfile)) if config.has_option(section,optionname): if opttype==bool: retvalue=config.getboolean(section,optionname) elif opttype==int: retvalue=config.getint(section,optionname) elif opttype==float: retvalue=config.getfloat(section,optionname) else: retvalue=config.get(section,optionname) return retvalue @pytest.fixture def options(): options=dict() configFile='setup.cfg' if pytest.config.inifile: configFile=str(pytest.config.inifile) options["esserver"]=getConfig('esserver','localhost:9200','mozdef',configFile) options["loginput"]=getConfig('loginput','localhost:8080','mozdef',configFile) options["webuiurl"]=getConfig('webuiurl','http://localhost/','mozdef',configFile) options["kibanaurl"]=getConfig('kibanaurl','http://localhost:9090/','mozdef',configFile) if pytest.config.option.verbose > 0: options["verbose"]=True print('Using options: \n\t%r' % options) else: options["verbose"]=False return options @pytest.fixture() def cleandir(): newpath = tempfile.mkdtemp() os.chdir(newpath) def pytest_report_header(config): if config.option.verbose > 0: return ["reporting verbose test output"] def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." )
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation # # Contributors: # Brandon Myers bmyers@mozilla.com def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." )
Remove unused config options in tests
Remove unused config options in tests
Python
mpl-2.0
ameihm0912/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,mozilla/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,mozilla/MozDef,mpurzynski/MozDef,ameihm0912/MozDef,mozilla/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,ameihm0912/MozDef,jeffbryner/MozDef,mozilla/MozDef
import pytest import tempfile import os import ConfigParser def getConfig(optionname,thedefault,section,configfile): """read an option from a config file or set a default send 'thedefault' as the data class you want to get a string back i.e. 'True' will return a string True will return a bool 1 will return an int """ #getConfig('something','adefaultvalue') retvalue=thedefault opttype=type(thedefault) if os.path.isfile(configfile): config = ConfigParser.ConfigParser() config.readfp(open(configfile)) if config.has_option(section,optionname): if opttype==bool: retvalue=config.getboolean(section,optionname) elif opttype==int: retvalue=config.getint(section,optionname) elif opttype==float: retvalue=config.getfloat(section,optionname) else: retvalue=config.get(section,optionname) return retvalue @pytest.fixture def options(): options=dict() configFile='setup.cfg' if pytest.config.inifile: configFile=str(pytest.config.inifile) options["esserver"]=getConfig('esserver','localhost:9200','mozdef',configFile) options["loginput"]=getConfig('loginput','localhost:8080','mozdef',configFile) options["webuiurl"]=getConfig('webuiurl','http://localhost/','mozdef',configFile) options["kibanaurl"]=getConfig('kibanaurl','http://localhost:9090/','mozdef',configFile) if pytest.config.option.verbose > 0: options["verbose"]=True print('Using options: \n\t%r' % options) else: options["verbose"]=False return options @pytest.fixture() def cleandir(): newpath = tempfile.mkdtemp() os.chdir(newpath) def pytest_report_header(config): if config.option.verbose > 0: return ["reporting verbose test output"] def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." ) Remove unused config options in tests
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation # # Contributors: # Brandon Myers bmyers@mozilla.com def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." )
<commit_before>import pytest import tempfile import os import ConfigParser def getConfig(optionname,thedefault,section,configfile): """read an option from a config file or set a default send 'thedefault' as the data class you want to get a string back i.e. 'True' will return a string True will return a bool 1 will return an int """ #getConfig('something','adefaultvalue') retvalue=thedefault opttype=type(thedefault) if os.path.isfile(configfile): config = ConfigParser.ConfigParser() config.readfp(open(configfile)) if config.has_option(section,optionname): if opttype==bool: retvalue=config.getboolean(section,optionname) elif opttype==int: retvalue=config.getint(section,optionname) elif opttype==float: retvalue=config.getfloat(section,optionname) else: retvalue=config.get(section,optionname) return retvalue @pytest.fixture def options(): options=dict() configFile='setup.cfg' if pytest.config.inifile: configFile=str(pytest.config.inifile) options["esserver"]=getConfig('esserver','localhost:9200','mozdef',configFile) options["loginput"]=getConfig('loginput','localhost:8080','mozdef',configFile) options["webuiurl"]=getConfig('webuiurl','http://localhost/','mozdef',configFile) options["kibanaurl"]=getConfig('kibanaurl','http://localhost:9090/','mozdef',configFile) if pytest.config.option.verbose > 0: options["verbose"]=True print('Using options: \n\t%r' % options) else: options["verbose"]=False return options @pytest.fixture() def cleandir(): newpath = tempfile.mkdtemp() os.chdir(newpath) def pytest_report_header(config): if config.option.verbose > 0: return ["reporting verbose test output"] def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." ) <commit_msg>Remove unused config options in tests<commit_after>
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation # # Contributors: # Brandon Myers bmyers@mozilla.com def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." )
import pytest import tempfile import os import ConfigParser def getConfig(optionname,thedefault,section,configfile): """read an option from a config file or set a default send 'thedefault' as the data class you want to get a string back i.e. 'True' will return a string True will return a bool 1 will return an int """ #getConfig('something','adefaultvalue') retvalue=thedefault opttype=type(thedefault) if os.path.isfile(configfile): config = ConfigParser.ConfigParser() config.readfp(open(configfile)) if config.has_option(section,optionname): if opttype==bool: retvalue=config.getboolean(section,optionname) elif opttype==int: retvalue=config.getint(section,optionname) elif opttype==float: retvalue=config.getfloat(section,optionname) else: retvalue=config.get(section,optionname) return retvalue @pytest.fixture def options(): options=dict() configFile='setup.cfg' if pytest.config.inifile: configFile=str(pytest.config.inifile) options["esserver"]=getConfig('esserver','localhost:9200','mozdef',configFile) options["loginput"]=getConfig('loginput','localhost:8080','mozdef',configFile) options["webuiurl"]=getConfig('webuiurl','http://localhost/','mozdef',configFile) options["kibanaurl"]=getConfig('kibanaurl','http://localhost:9090/','mozdef',configFile) if pytest.config.option.verbose > 0: options["verbose"]=True print('Using options: \n\t%r' % options) else: options["verbose"]=False return options @pytest.fixture() def cleandir(): newpath = tempfile.mkdtemp() os.chdir(newpath) def pytest_report_header(config): if config.option.verbose > 0: return ["reporting verbose test output"] def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." ) Remove unused config options in tests#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation # # Contributors: # Brandon Myers bmyers@mozilla.com def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." )
<commit_before>import pytest import tempfile import os import ConfigParser def getConfig(optionname,thedefault,section,configfile): """read an option from a config file or set a default send 'thedefault' as the data class you want to get a string back i.e. 'True' will return a string True will return a bool 1 will return an int """ #getConfig('something','adefaultvalue') retvalue=thedefault opttype=type(thedefault) if os.path.isfile(configfile): config = ConfigParser.ConfigParser() config.readfp(open(configfile)) if config.has_option(section,optionname): if opttype==bool: retvalue=config.getboolean(section,optionname) elif opttype==int: retvalue=config.getint(section,optionname) elif opttype==float: retvalue=config.getfloat(section,optionname) else: retvalue=config.get(section,optionname) return retvalue @pytest.fixture def options(): options=dict() configFile='setup.cfg' if pytest.config.inifile: configFile=str(pytest.config.inifile) options["esserver"]=getConfig('esserver','localhost:9200','mozdef',configFile) options["loginput"]=getConfig('loginput','localhost:8080','mozdef',configFile) options["webuiurl"]=getConfig('webuiurl','http://localhost/','mozdef',configFile) options["kibanaurl"]=getConfig('kibanaurl','http://localhost:9090/','mozdef',configFile) if pytest.config.option.verbose > 0: options["verbose"]=True print('Using options: \n\t%r' % options) else: options["verbose"]=False return options @pytest.fixture() def cleandir(): newpath = tempfile.mkdtemp() os.chdir(newpath) def pytest_report_header(config): if config.option.verbose > 0: return ["reporting verbose test output"] def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." ) <commit_msg>Remove unused config options in tests<commit_after>#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation # # Contributors: # Brandon Myers bmyers@mozilla.com def pytest_addoption(parser): parser.addoption( "--delete_indexes", action='store_true', default=False, help="A flag to indicate if we should delete all indexes in ES before each test. This could result in inconsistent tests if not specified." )
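The trimmed MozDef conftest above keeps only the pytest_addoption hook that registers --delete_indexes. A minimal sketch of how a fixture could read that flag back through pytest's config; the fixture name and the test body are assumptions, not taken from the repository.

import pytest

@pytest.fixture
def delete_indexes(request):
    # registered command line options are exposed via config.getoption()
    return request.config.getoption('--delete_indexes')

def test_example(delete_indexes):
    if delete_indexes:
        pass  # e.g. wipe the Elasticsearch indexes before running the checks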
d7a347b0cee650d7b5cb6a0eca613da543e0e305
tests/conftest.py
tests/conftest.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from flask import Flask, jsonify @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from textwrap import dedent from flask import Flask, jsonify pytest_plugins = 'pytester' @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app @pytest.fixture def appdir(testdir): app_root = testdir.tmpdir test_root = app_root.mkdir('tests') def create_test_module(code, filename='test_app.py'): f = test_root.join(filename) f.write(dedent(code), ensure=True) return f testdir.create_test_module = create_test_module testdir.create_test_module(''' import pytest from flask import Flask @pytest.fixture def app(): app = Flask(__name__) return app ''', filename='conftest.py') return testdir
Add `appdir` fixture to simplify testing
Add `appdir` fixture to simplify testing
Python
mit
amateja/pytest-flask
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from flask import Flask, jsonify @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app Add `appdir` fixture to simplify testing
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from textwrap import dedent from flask import Flask, jsonify pytest_plugins = 'pytester' @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app @pytest.fixture def appdir(testdir): app_root = testdir.tmpdir test_root = app_root.mkdir('tests') def create_test_module(code, filename='test_app.py'): f = test_root.join(filename) f.write(dedent(code), ensure=True) return f testdir.create_test_module = create_test_module testdir.create_test_module(''' import pytest from flask import Flask @pytest.fixture def app(): app = Flask(__name__) return app ''', filename='conftest.py') return testdir
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from flask import Flask, jsonify @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app <commit_msg>Add `appdir` fixture to simplify testing<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from textwrap import dedent from flask import Flask, jsonify pytest_plugins = 'pytester' @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app @pytest.fixture def appdir(testdir): app_root = testdir.tmpdir test_root = app_root.mkdir('tests') def create_test_module(code, filename='test_app.py'): f = test_root.join(filename) f.write(dedent(code), ensure=True) return f testdir.create_test_module = create_test_module testdir.create_test_module(''' import pytest from flask import Flask @pytest.fixture def app(): app = Flask(__name__) return app ''', filename='conftest.py') return testdir
#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from flask import Flask, jsonify @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app Add `appdir` fixture to simplify testing#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from textwrap import dedent from flask import Flask, jsonify pytest_plugins = 'pytester' @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app @pytest.fixture def appdir(testdir): app_root = testdir.tmpdir test_root = app_root.mkdir('tests') def create_test_module(code, filename='test_app.py'): f = test_root.join(filename) f.write(dedent(code), ensure=True) return f testdir.create_test_module = create_test_module testdir.create_test_module(''' import pytest from flask import Flask @pytest.fixture def app(): app = Flask(__name__) return app ''', filename='conftest.py') return testdir
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from flask import Flask, jsonify @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app <commit_msg>Add `appdir` fixture to simplify testing<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from textwrap import dedent from flask import Flask, jsonify pytest_plugins = 'pytester' @pytest.fixture def app(): app = Flask(__name__) app.config['SECRET_KEY'] = '42' @app.route('/') def index(): return app.response_class('OK') @app.route('/ping') def ping(): return jsonify(ping='pong') return app @pytest.fixture def appdir(testdir): app_root = testdir.tmpdir test_root = app_root.mkdir('tests') def create_test_module(code, filename='test_app.py'): f = test_root.join(filename) f.write(dedent(code), ensure=True) return f testdir.create_test_module = create_test_module testdir.create_test_module(''' import pytest from flask import Flask @pytest.fixture def app(): app = Flask(__name__) return app ''', filename='conftest.py') return testdir
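An aside on the record above: the `appdir` fixture builds on pytest's `pytester` plugin (the `testdir` fixture), which runs a throwaway pytest session against files written into a temporary directory. Below is a minimal sketch of how a plugin test might use it; the inner test body, the assumption that pytest-flask's `client` fixture is active in the inner session, and the expected outcome are illustrative guesses, not part of the dataset record.

def test_client_fixture_is_available(appdir):
    # Write an inner test module next to the generated conftest.py.
    appdir.create_test_module('''
        def test_ping(client):
            # The bare Flask app defined in the inner conftest has no routes.
            assert client.get('/ping').status_code == 404
    ''')
    result = appdir.runpytest()  # run the throwaway pytest session
    result.assert_outcomes(passed=1)  # older pytest releases expose parseoutcomes() instead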
1ce9d232e290c32f2c3e851617a89966f0e3eb87
lib/templatetags/baseurl.py
lib/templatetags/baseurl.py
import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html) for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup)
import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html, 'html.parser') for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup)
Change bs4 html parser to html.parser
Change bs4 html parser to html.parser This is to fix wrapping with <html> tags.
Python
mit
peterkuma/tjrapid,peterkuma/tjrapid,peterkuma/tjrapid
import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html) for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup) Change bs4 html parser to html.parser This is to fix wrapping with <html> tags.
import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html, 'html.parser') for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup)
<commit_before>import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html) for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup) <commit_msg>Change bs4 html parser to html.parser This is to fix wrapping with <html> tags.<commit_after>
import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html, 'html.parser') for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup)
import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html) for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup) Change bs4 html parser to html.parser This is to fix wrapping with <html> tags.import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html, 'html.parser') for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup)
<commit_before>import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html) for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup) <commit_msg>Change bs4 html parser to html.parser This is to fix wrapping with <html> tags.<commit_after>import re from bs4 import BeautifulSoup from django import template register = template.Library() @register.filter(is_safe=True) def baseurl(html, base): if not base.endswith('/'): base += '/' absurl = re.compile(r'\s*[a-zA-Z][a-zA-Z0-9\+\.\-]*:') # Starts with scheme:. def isabs(url): return url.startswith('/') or absurl.match(url) soup = BeautifulSoup(html, 'html.parser') for link in soup.findAll('a', href=True): if not isabs(link['href']): link['href'] = base + link['href'] for img in soup.findAll('img', src=True): if not isabs(img['src']): img['src'] = base + img['src'] elements = soup.findAll(style=True) # All styled elements. for e in elements: def func(m): url = m.group(2) if not isabs(url): url = base + url return m.group(1) + url + m.group(3) e['style'] = re.sub(r'''(url\(\s*)([^\s\)\"\']*)(\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*")([^\s\"]*)("\s*\))''', func, e['style']) e['style'] = re.sub(r'''(url\(\s*')([^\s\']*)('\s*\))''', func, e['style']) return str(soup)
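The commit message above hinges on a BeautifulSoup parser quirk: tree builders such as lxml normalize an HTML fragment into a full document (adding <html>/<body> tags), while the stdlib 'html.parser' builder leaves fragments alone, which is what this template filter needs. A small illustration; the lxml behaviour only applies when lxml is installed and selected:

from bs4 import BeautifulSoup

fragment = '<a href="about">about</a>'
print(str(BeautifulSoup(fragment, 'html.parser')))
# -> <a href="about">about</a>                               (fragment preserved)
# With lxml: str(BeautifulSoup(fragment, 'lxml'))
# -> <html><body><a href="about">about</a></body></html>     (fragment wrapped)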
10b58658f76c0d51c0ae091788db78de5204a284
example.py
example.py
import generation import show """ Choose the excitation signal which you want to generate and fill in the parameters xxx_sweep(fstart, fstop, sweep_time, fs), where: fstart is the start frequency fstop is the stop frequency sweep_time is the total length of sweep fs is the sampling frequency Note that the stop frequency must not be greater than half the sampling frequency (Nyquist-Shannon sampling theorem) Save the return value in a new variable which is the sweep vector. """ # For example x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz and save the vector in x. Now it is possible to figure the sweep vector in both the time and frequency domain simultaneously, using the 'show' function: show.sweep(x, sweep_time, fs). Note that 'sweep_time' and 'fs' have the same values as in 'generation' function. """ # For example show.sweep(x, 2, 44100)
#!/usr/bin/env python3 import generation import show # For example """Save the return value in a new variable which is the sweep vector. """ x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz. """ show.sweep(x, 2, 44100, "tfdomain")
Make executable and edit some docstrings
Make executable and edit some docstrings
Python
mit
spatialaudio/sweep,franzpl/sweep
import generation import show """ Choose the excitation signal which you want to generate and fill in the parameters xxx_sweep(fstart, fstop, sweep_time, fs), where: fstart is the start frequency fstop is the stop frequency sweep_time is the total length of sweep fs is the sampling frequency Note that the stop frequency must not be greater than half the sampling frequency (Nyquist-Shannon sampling theorem) Save the return value in a new variable which is the sweep vector. """ # For example x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz and save the vector in x. Now it is possible to figure the sweep vector in both the time and frequency domain simultaneously, using the 'show' function: show.sweep(x, sweep_time, fs). Note that 'sweep_time' and 'fs' have the same values as in 'generation' function. """ # For example show.sweep(x, 2, 44100) Make executable and edit some docstrings
#!/usr/bin/env python3 import generation import show # For example """Save the return value in a new variable which is the sweep vector. """ x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz. """ show.sweep(x, 2, 44100, "tfdomain")
<commit_before>import generation import show """ Choose the excitation signal which you want to generate and fill in the parameters xxx_sweep(fstart, fstop, sweep_time, fs), where: fstart is the start frequency fstop is the stop frequency sweep_time is the total length of sweep fs is the sampling frequency Note that the stop frequency must not be greater than half the sampling frequency (Nyquist-Shannon sampling theorem) Save the return value in a new variable which is the sweep vector. """ # For example x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz and save the vector in x. Now it is possible to figure the sweep vector in both the time and frequency domain simultaneously, using the 'show' function: show.sweep(x, sweep_time, fs). Note that 'sweep_time' and 'fs' have the same values as in 'generation' function. """ # For example show.sweep(x, 2, 44100) <commit_msg>Make executable and edit some docstrings<commit_after>
#!/usr/bin/env python3 import generation import show # For example """Save the return value in a new variable which is the sweep vector. """ x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz. """ show.sweep(x, 2, 44100, "tfdomain")
import generation import show """ Choose the excitation signal which you want to generate and fill in the parameters xxx_sweep(fstart, fstop, sweep_time, fs), where: fstart is the start frequency fstop is the stop frequency sweep_time is the total length of sweep fs is the sampling frequency Note that the stop frequency must not be greater than half the sampling frequency (Nyquist-Shannon sampling theorem) Save the return value in a new variable which is the sweep vector. """ # For example x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz and save the vector in x. Now it is possible to figure the sweep vector in both the time and frequency domain simultaneously, using the 'show' function: show.sweep(x, sweep_time, fs). Note that 'sweep_time' and 'fs' have the same values as in 'generation' function. """ # For example show.sweep(x, 2, 44100) Make executable and edit some docstrings#!/usr/bin/env python3 import generation import show # For example """Save the return value in a new variable which is the sweep vector. """ x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz. """ show.sweep(x, 2, 44100, "tfdomain")
<commit_before>import generation import show """ Choose the excitation signal which you want to generate and fill in the parameters xxx_sweep(fstart, fstop, sweep_time, fs), where: fstart is the start frequency fstop is the stop frequency sweep_time is the total length of sweep fs is the sampling frequency Note that the stop frequency must not be greater than half the sampling frequency (Nyquist-Shannon sampling theorem) Save the return value in a new variable which is the sweep vector. """ # For example x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz and save the vector in x. Now it is possible to figure the sweep vector in both the time and frequency domain simultaneously, using the 'show' function: show.sweep(x, sweep_time, fs). Note that 'sweep_time' and 'fs' have the same values as in 'generation' function. """ # For example show.sweep(x, 2, 44100) <commit_msg>Make executable and edit some docstrings<commit_after>#!/usr/bin/env python3 import generation import show # For example """Save the return value in a new variable which is the sweep vector. """ x = generation.log_sweep(1, 1000, 2, 44100) """We created a vector which sweeps from 1 Hz to 1000 Hz in 2 seconds at a sampling frequency of 44.1 kHz. """ show.sweep(x, 2, 44100, "tfdomain")
ab3d07cf5d169459515348777a68f825e182ba03
scripts/print_view_hierarchy.py
scripts/print_view_hierarchy.py
"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]') def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.GetCommandInterpreter().HandleCommand('po [[UIWindow keyWindow] recursiveDescription]', result) def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
Update view hierarchy script to use the interpreter as needed for tests.
Update view hierarchy script to use the interpreter as needed for tests.
Python
mit
mrhappyasthma/happydebugging,mrhappyasthma/HappyDebugging
"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]') def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv') Update view hierarchy script to use the interpreter as needed for tests.
"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.GetCommandInterpreter().HandleCommand('po [[UIWindow keyWindow] recursiveDescription]', result) def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
<commit_before>"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]') def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv') <commit_msg>Update view hierarchy script to use the interpreter as needed for tests.<commit_after>
"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.GetCommandInterpreter().HandleCommand('po [[UIWindow keyWindow] recursiveDescription]', result) def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]') def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv') Update view hierarchy script to use the interpreter as needed for tests."""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.GetCommandInterpreter().HandleCommand('po [[UIWindow keyWindow] recursiveDescription]', result) def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
<commit_before>"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]') def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv') <commit_msg>Update view hierarchy script to use the interpreter as needed for tests.<commit_after>"""Prints the current view hierarchy. Usage: pv """ def print_view_hierarchy(debugger, command, result, internal_dict): debugger.GetCommandInterpreter().HandleCommand('po [[UIWindow keyWindow] recursiveDescription]', result) def __lldb_init_module(debugger, internal_dict): debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
6dce76d8442e835b2c27db969ef4b3285d9c10bb
tests/testwith.py
tests/testwith.py
import sys if sys.version_info[:2] > (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()
import sys if sys.version_info[:2] >= (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()
Enable with statement tests for Python 2.5
Enable with statement tests for Python 2.5 --HG-- extra : convert_revision : svn%3Ab9624562-6840-0410-91c4-7d0ded462287/trunk%40287
Python
bsd-2-clause
WiserTogether/mock,beyang/mock
import sys if sys.version_info[:2] > (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()Enable with statement tests for Python 2.5 --HG-- extra : convert_revision : svn%3Ab9624562-6840-0410-91c4-7d0ded462287/trunk%40287
import sys if sys.version_info[:2] >= (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()
<commit_before>import sys if sys.version_info[:2] > (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()<commit_msg>Enable with statement tests for Python 2.5 --HG-- extra : convert_revision : svn%3Ab9624562-6840-0410-91c4-7d0ded462287/trunk%40287<commit_after>
import sys if sys.version_info[:2] >= (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()
import sys if sys.version_info[:2] > (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()Enable with statement tests for Python 2.5 --HG-- extra : convert_revision : svn%3Ab9624562-6840-0410-91c4-7d0ded462287/trunk%40287import sys if sys.version_info[:2] >= (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()
<commit_before>import sys if sys.version_info[:2] > (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()<commit_msg>Enable with statement tests for Python 2.5 --HG-- extra : convert_revision : svn%3Ab9624562-6840-0410-91c4-7d0ded462287/trunk%40287<commit_after>import sys if sys.version_info[:2] >= (2, 5): from tests._testwith import * else: from tests.support import unittest2 class TestWith(unittest2.TestCase): @unittest2.skip('tests using with statement skipped on Python 2.4') def testWith(self): pass if __name__ == '__main__': unittest2.main()
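The one-character fix above works because `sys.version_info[:2]` compares element-wise as a tuple, so the old strict comparison excluded exactly the version the commit wants to include:

# Tuple comparison is element-wise:
(2, 5) > (2, 5)    # False -> with-statement tests were skipped on Python 2.5
(2, 5) >= (2, 5)   # True  -> they now run on 2.5
(2, 4) >= (2, 5)   # False -> Python 2.4 still takes the skip branch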
c53cec75ef487ccd2eb9e86987f67bd8bfff87d2
tests/integration/cli/sync_test.py
tests/integration/cli/sync_test.py
from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo')
from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') def test_sync_repo_is_destructive(self): self.exec_in_container('busyboxa', 'touch /repo/testfile') self.assertFileInContainer('busyboxa', '/repo/testfile') self.run_command('sync fake-repo') self.assertFileNotInContainer('busyboxa', '/repo/testfile')
Add test that sync is now destructive
Add test that sync is now destructive
Python
mit
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') Add test that sync is now destructive
from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') def test_sync_repo_is_destructive(self): self.exec_in_container('busyboxa', 'touch /repo/testfile') self.assertFileInContainer('busyboxa', '/repo/testfile') self.run_command('sync fake-repo') self.assertFileNotInContainer('busyboxa', '/repo/testfile')
<commit_before>from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') <commit_msg>Add test that sync is now destructive<commit_after>
from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') def test_sync_repo_is_destructive(self): self.exec_in_container('busyboxa', 'touch /repo/testfile') self.assertFileInContainer('busyboxa', '/repo/testfile') self.run_command('sync fake-repo') self.assertFileNotInContainer('busyboxa', '/repo/testfile')
from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') Add test that sync is now destructivefrom ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') def test_sync_repo_is_destructive(self): self.exec_in_container('busyboxa', 'touch /repo/testfile') self.assertFileInContainer('busyboxa', '/repo/testfile') self.run_command('sync fake-repo') self.assertFileNotInContainer('busyboxa', '/repo/testfile')
<commit_before>from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') <commit_msg>Add test that sync is now destructive<commit_after>from ...testcases import DustyIntegrationTestCase from ...fixtures import busybox_single_app_bundle_fixture class TestSyncCLI(DustyIntegrationTestCase): def setUp(self): super(TestSyncCLI, self).setUp() busybox_single_app_bundle_fixture() self.run_command('bundles activate busyboxa') self.run_command('up') def tearDown(self): self.run_command('bundles deactivate busyboxa') try: self.run_command('stop') except Exception: pass super(TestSyncCLI, self).tearDown() def test_sync_repo(self): self.exec_in_container('busyboxa', 'rm -rf /repo') self.assertFileNotInContainer('busyboxa', '/repo/README.md') self.run_command('sync fake-repo') self.assertFileContentsInContainer('busyboxa', '/repo/README.md', '# fake-repo') def test_sync_repo_is_destructive(self): self.exec_in_container('busyboxa', 'touch /repo/testfile') self.assertFileInContainer('busyboxa', '/repo/testfile') self.run_command('sync fake-repo') self.assertFileNotInContainer('busyboxa', '/repo/testfile')
5c8e83373a854242aca7a82611d47d1fdb85269e
toolbox/models.py
toolbox/models.py
from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, activation='relu', init='he_normal', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, activation='relu', init='he_normal')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3)) return model
from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, init='he_normal', activation='relu', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, init='he_normal', activation='relu')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3, init='he_normal')) return model
Use he_normal initialization for all layers
Use he_normal initialization for all layers
Python
mit
qobilidop/srcnn,qobilidop/srcnn
from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, activation='relu', init='he_normal', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, activation='relu', init='he_normal')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3)) return model Use he_normal initialization for all layers
from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, init='he_normal', activation='relu', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, init='he_normal', activation='relu')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3, init='he_normal')) return model
<commit_before>from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, activation='relu', init='he_normal', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, activation='relu', init='he_normal')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3)) return model <commit_msg>Use he_normal initialization for all layers<commit_after>
from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, init='he_normal', activation='relu', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, init='he_normal', activation='relu')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3, init='he_normal')) return model
from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, activation='relu', init='he_normal', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, activation='relu', init='he_normal')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3)) return model Use he_normal initialization for all layersfrom keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, init='he_normal', activation='relu', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, init='he_normal', activation='relu')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3, init='he_normal')) return model
<commit_before>from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, activation='relu', init='he_normal', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, activation='relu', init='he_normal')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3)) return model <commit_msg>Use he_normal initialization for all layers<commit_after>from keras.layers import Conv2D from keras.models import Sequential def srcnn(input_shape, c=1, f1=9, f2=1, f3=5, n1=64, n2=32): model = Sequential() model.add(Conv2D(nb_filter=n1, nb_row=f1, nb_col=f1, init='he_normal', activation='relu', input_shape=input_shape)) model.add(Conv2D(nb_filter=n2, nb_row=f2, nb_col=f2, init='he_normal', activation='relu')) model.add(Conv2D(nb_filter=c, nb_row=f3, nb_col=f3, init='he_normal')) return model
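For context on the record above: "he_normal" (He et al. initialization) draws each weight from a zero-mean normal distribution with standard deviation sqrt(2 / fan_in), which keeps activation variance roughly stable through ReLU layers. A rough NumPy sketch of the idea, not Keras's exact implementation:

import numpy as np

def he_normal(fan_in, fan_out):
    # std = sqrt(2 / fan_in), appropriate for layers followed by ReLU
    return np.random.randn(fan_in, fan_out) * np.sqrt(2.0 / fan_in)

W = he_normal(64, 32)  # a 64 -> 32 fully-connected example of the same rule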
9c5b501399bd7c7045b972f0343c0bee881c9dd5
sync_watchdog.py
sync_watchdog.py
from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: os.kill(worker["Process"], signal.SIGKILL) alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]})
from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't # if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: # os.kill(worker["Process"], signal.SIGKILL) # alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]})
Disable sync watchdog timeout check.
Disable sync watchdog timeout check.
Python
apache-2.0
mduggan/tapiriik,abhijit86k/tapiriik,campbellr/tapiriik,campbellr/tapiriik,cmgrote/tapiriik,marxin/tapiriik,brunoflores/tapiriik,dmschreiber/tapiriik,gavioto/tapiriik,dlenski/tapiriik,brunoflores/tapiriik,cgourlay/tapiriik,marxin/tapiriik,cgourlay/tapiriik,cpfair/tapiriik,mduggan/tapiriik,mduggan/tapiriik,abhijit86k/tapiriik,dlenski/tapiriik,abs0/tapiriik,gavioto/tapiriik,brunoflores/tapiriik,dmschreiber/tapiriik,niosus/tapiriik,campbellr/tapiriik,olamy/tapiriik,mjnbike/tapiriik,mduggan/tapiriik,niosus/tapiriik,cheatos101/tapiriik,marxin/tapiriik,olamy/tapiriik,cheatos101/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,abhijit86k/tapiriik,olamy/tapiriik,abhijit86k/tapiriik,brunoflores/tapiriik,cgourlay/tapiriik,niosus/tapiriik,campbellr/tapiriik,abs0/tapiriik,niosus/tapiriik,cmgrote/tapiriik,abs0/tapiriik,mjnbike/tapiriik,mjnbike/tapiriik,cpfair/tapiriik,cpfair/tapiriik,dlenski/tapiriik,cheatos101/tapiriik,cheatos101/tapiriik,cmgrote/tapiriik,cgourlay/tapiriik,dmschreiber/tapiriik,olamy/tapiriik,cpfair/tapiriik,dlenski/tapiriik,marxin/tapiriik,gavioto/tapiriik,dmschreiber/tapiriik,gavioto/tapiriik,abs0/tapiriik
from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: os.kill(worker["Process"], signal.SIGKILL) alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]}) Disable sync watchdog timeout check.
from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't # if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: # os.kill(worker["Process"], signal.SIGKILL) # alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]})
<commit_before>from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: os.kill(worker["Process"], signal.SIGKILL) alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]}) <commit_msg>Disable sync watchdog timeout check.<commit_after>
from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't # if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: # os.kill(worker["Process"], signal.SIGKILL) # alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]})
from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: os.kill(worker["Process"], signal.SIGKILL) alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]}) Disable sync watchdog timeout check.from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't # if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: # os.kill(worker["Process"], signal.SIGKILL) # alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]})
<commit_before>from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: os.kill(worker["Process"], signal.SIGKILL) alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]}) <commit_msg>Disable sync watchdog timeout check.<commit_after>from tapiriik.database import db from tapiriik.sync import SyncStep import os import signal import socket from datetime import timedelta, datetime for worker in db.sync_workers.find({"Host": socket.gethostname()}): # Does the process still exist? alive = True try: os.kill(worker["Process"], 0) except os.error: alive = False # Has it been stalled for too long? if worker["State"] == SyncStep.List: timeout = timedelta(minutes=45) # This can take a loooooooong time else: timeout = timedelta(minutes=10) # But everything else shouldn't # if alive and worker["Heartbeat"] < datetime.utcnow() - timeout: # os.kill(worker["Process"], signal.SIGKILL) # alive = False # Clear it from the database if it's not alive. if not alive: db.sync_workers.remove({"_id": worker["_id"]})
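The watchdog above leans on the classic `os.kill(pid, 0)` probe: signal 0 delivers nothing, but the call still raises `OSError` when the PID does not exist, so it doubles as a liveness check. A minimal standalone illustration:

import os

def process_alive(pid):
    # Best-effort check; an EPERM-flavoured OSError would actually mean
    # "exists but owned by someone else", which this simple version ignores.
    try:
        os.kill(pid, 0)
    except OSError:
        return False
    return True

print(process_alive(os.getpid()))  # True for the current process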
cf025969d7cbf2d78cfbf890967fd6a67fbc53c6
django_bootstrap_calendar/models.py
django_bootstrap_calendar/models.py
# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title
# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext_lazy as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title
Use ugettext_lazy instead of ugettext to make it compatible with Django 1.7
Use ugettext_lazy instead of ugettext to make it compatible with Django 1.7 https://docs.djangoproject.com/en/dev/ref/applications/#applications-troubleshooting
Python
bsd-3-clause
dannybrowne86/django-bootstrap-calendar,mfmarlonferrari/django-bootstrap-calendar,arbitrahj/django-bootstrap-calendar,arbitrahj/django-bootstrap-calendar,tiagovaz/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar,mfmarlonferrari/django-bootstrap-calendar,tiagovaz/django-bootstrap-calendar,tiagovaz/django-bootstrap-calendar,dannybrowne86/django-bootstrap-calendar,arbitrahj/django-bootstrap-calendar,dannybrowne86/django-bootstrap-calendar,mfmarlonferrari/django-bootstrap-calendar
# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title Use ugettext_lazy instead of ugettext to make it compatible with Django 1.7 https://docs.djangoproject.com/en/dev/ref/applications/#applications-troubleshooting
# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext_lazy as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title
<commit_before># -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title <commit_msg>Use ugettext_lazy instead of ugettext to make it compatible with Django 1.7 https://docs.djangoproject.com/en/dev/ref/applications/#applications-troubleshooting<commit_after>
# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext_lazy as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title
# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title Use ugettext_lazy instead of ugettext to make it compatible with Django 1.7 https://docs.djangoproject.com/en/dev/ref/applications/#applications-troubleshooting# -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext_lazy as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title
<commit_before># -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title <commit_msg>Use ugettext_lazy instead of ugettext to make it compatible with Django 1.7 https://docs.djangoproject.com/en/dev/ref/applications/#applications-troubleshooting<commit_after># -*- coding: utf-8 -*- __author__ = 'sandlbn' from django.db import models from django.utils.translation import ugettext_lazy as _ from utils import datetime_to_timestamp class CalendarEvent(models.Model): """ Calendar Events """ CSS_CLASS_CHOICES = ( ('', _('Normal')), ('event-warning', _('Warning')), ('event-info', _('Info')), ('event-success', _('Success')), ('event-inverse', _('Inverse')), ('event-special', _('Special')), ('event-important', _('Important')), ) title = models.CharField(max_length=255, verbose_name=_('Title')) url = models.URLField(verbose_name=_('URL')) css_class = models.CharField(max_length=20, verbose_name=_('CSS Class')) start = models.DateTimeField(verbose_name=_('Start Date')) end = models.DateTimeField(verbose_name=_('End Date'), blank=True) @property def start_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.start) @property def end_timestamp(self): """ Return end date as timestamp """ return datetime_to_timestamp(self.end) def __unicode__(self): return self.title
e07b2e24cddc8a2e2d1c8838e8509b2009344714
util/BaseModel.py
util/BaseModel.py
""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True)
""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) @classmethod def from_urlsafe(cls, urlsafe): key = ndb.Key(urlsafe=urlsafe) obj = key.get() if obj and isinstance(obj, cls): return obj
Add a utility method to get instances from urlsafe key.
Add a utility method to get instances from urlsafe key.
Python
apache-2.0
kkinder/GAEStarterKit,kkinder/GAEStarterKit,kkinder/GAEStarterKit
""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) Add a utility method to get instances from urlsafe key.
""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) @classmethod def from_urlsafe(cls, urlsafe): key = ndb.Key(urlsafe=urlsafe) obj = key.get() if obj and isinstance(obj, cls): return obj
<commit_before>""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) <commit_msg>Add a utility method to get instances from urlsafe key.<commit_after>
""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) @classmethod def from_urlsafe(cls, urlsafe): key = ndb.Key(urlsafe=urlsafe) obj = key.get() if obj and isinstance(obj, cls): return obj
""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) Add a utility method to get instances from urlsafe key.""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) @classmethod def from_urlsafe(cls, urlsafe): key = ndb.Key(urlsafe=urlsafe) obj = key.get() if obj and isinstance(obj, cls): return obj
<commit_before>""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) <commit_msg>Add a utility method to get instances from urlsafe key.<commit_after>""" Contains base class for ndb models. This adds functionality that is expected (or at least useful) elsewhere in GEAStarterKit. """ from google.appengine.ext import ndb class BaseModel(ndb.Model): date_created = ndb.DateTimeProperty(auto_now_add=True, required=True) date_modified = ndb.DateTimeProperty(auto_now=True, required=True) @classmethod def from_urlsafe(cls, urlsafe): key = ndb.Key(urlsafe=urlsafe) obj = key.get() if obj and isinstance(obj, cls): return obj
39769907bdcd019ec6a7d4f2ee1be82efd760611
src/rinoh/language/pl.py
src/rinoh/language/pl.py
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Index', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrzeżenie!', danger='Uwaga!', error='Błąd', hint='Wskazówka', important='Ważne', note='Uwaga', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Skorowidz', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrożnie!', danger='!NIEBEZPIECZEŃSTWO!', error='Błąd', hint='Wskazówka', important='Ważne', note='Notatka', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL
Add Polish language document strings.
Add Polish language document strings.
Python
agpl-3.0
brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Index', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrzeżenie!', danger='Uwaga!', error='Błąd', hint='Wskazówka', important='Ważne', note='Uwaga', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL Add Polish language document strings.
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Skorowidz', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrożnie!', danger='!NIEBEZPIECZEŃSTWO!', error='Błąd', hint='Wskazówka', important='Ważne', note='Notatka', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL
<commit_before># This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Index', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrzeżenie!', danger='Uwaga!', error='Błąd', hint='Wskazówka', important='Ważne', note='Uwaga', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL <commit_msg>Add Polish language document strings.<commit_after>
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Skorowidz', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrożnie!', danger='!NIEBEZPIECZEŃSTWO!', error='Błąd', hint='Wskazówka', important='Ważne', note='Notatka', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Index', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrzeżenie!', danger='Uwaga!', error='Błąd', hint='Wskazówka', important='Ważne', note='Uwaga', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL Add Polish language document strings.# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Skorowidz', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrożnie!', danger='!NIEBEZPIECZEŃSTWO!', error='Błąd', hint='Wskazówka', important='Ważne', note='Notatka', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL
<commit_before># This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Index', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrzeżenie!', danger='Uwaga!', error='Błąd', hint='Wskazówka', important='Ważne', note='Uwaga', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL <commit_msg>Add Polish language document strings.<commit_after># This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. from .cls import Language from ..structure import SectionTitles, AdmonitionTitles PL = Language('pl', 'Polski') SectionTitles( contents='Spis Treści', list_of_figures='Spis Ilustracji', list_of_tables='Spis Tabel', chapter='Rozdział', index='Skorowidz', ) in PL AdmonitionTitles( attention='Uwaga!', caution='Ostrożnie!', danger='!NIEBEZPIECZEŃSTWO!', error='Błąd', hint='Wskazówka', important='Ważne', note='Notatka', tip='Porada', warning='Ostrzeżenie', seealso='Zobacz również', ) in PL
cdcb503d3dbc4679a2bda9dd204df18ab334d70c
pyclub/content/forms.py
pyclub/content/forms.py
# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', 'status', )
# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', )
Remove field with default from the form
Remove field with default from the form
Python
mit
dvl/pyclub,dvl/pyclub,dvl/pyclub
# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', 'status', ) Remove field with default from the form
# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', )
<commit_before># -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', 'status', ) <commit_msg>Remove field with default from the form<commit_after>
# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', )
# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', 'status', ) Remove field with default from the form# -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', )
<commit_before># -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', 'status', ) <commit_msg>Remove field with default from the form<commit_after># -*- coding: utf-8 -*- from django import forms from . import models class PostForm(forms.ModelForm): class Meta: model = models.Post fields = ( 'title', 'body', )
56a94b6ca5cadceb503edc7b968f813e66fafe92
src/web/__init__.py
src/web/__init__.py
# -*- coding: utf-8 -*- from random import choice from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_content = mime_types popular_content = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return choice(list(self.all_content.keys())) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return choice(list(self.popular_content.keys()))
# -*- coding: utf-8 -*- from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_mime_types = mime_types popular_mime_types = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return self.random_element(self.all_mime_types.keys()) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return self.random_element(self.popular_mime_types.keys())
Use random_element instead of random.choice
Use random_element instead of random.choice
Python
apache-2.0
thiagofigueiro/faker_web
# -*- coding: utf-8 -*- from random import choice from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_content = mime_types popular_content = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return choice(list(self.all_content.keys())) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return choice(list(self.popular_content.keys())) Use random_element instead of random.choice
# -*- coding: utf-8 -*- from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_mime_types = mime_types popular_mime_types = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return self.random_element(self.all_mime_types.keys()) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return self.random_element(self.popular_mime_types.keys())
<commit_before># -*- coding: utf-8 -*- from random import choice from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_content = mime_types popular_content = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return choice(list(self.all_content.keys())) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return choice(list(self.popular_content.keys())) <commit_msg>Use random_element instead of random.choice<commit_after>
# -*- coding: utf-8 -*- from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_mime_types = mime_types popular_mime_types = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return self.random_element(self.all_mime_types.keys()) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return self.random_element(self.popular_mime_types.keys())
# -*- coding: utf-8 -*- from random import choice from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_content = mime_types popular_content = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return choice(list(self.all_content.keys())) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return choice(list(self.popular_content.keys())) Use random_element instead of random.choice# -*- coding: utf-8 -*- from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_mime_types = mime_types popular_mime_types = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return self.random_element(self.all_mime_types.keys()) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return self.random_element(self.popular_mime_types.keys())
<commit_before># -*- coding: utf-8 -*- from random import choice from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_content = mime_types popular_content = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return choice(list(self.all_content.keys())) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return choice(list(self.popular_content.keys())) <commit_msg>Use random_element instead of random.choice<commit_after># -*- coding: utf-8 -*- from faker.providers import BaseProvider from .mimetypes import mime_types class WebProvider(BaseProvider): """ A Provider for web-related test data. >>> from faker import Faker >>> from faker_web import WebProvider >>> fake = Faker() >>> fake.add_provider(WebProvider) """ all_mime_types = mime_types popular_mime_types = { 'application/javascript': ['js'], 'application/json': ['json'], 'application/pdf': ['pdf'], 'image/jpeg': ['jpeg', 'jpg', 'jpe'], 'image/gif': ['gif'], 'image/png': ['png'], 'image/svg+xml': ['svg', 'svgz'], 'text/css': ['css'], 'text/html': ['html', 'htm'], 'text/plain': ['txt', 'text', 'conf', 'def', 'list', 'log', 'in'], } def mime_type(self): """ Returns a mime-type from the list of types understood by the Apache http server. >>> fake.mime_type() application/mxf :return: content-type/mime-type :rtype: str """ return self.random_element(self.all_mime_types.keys()) def mime_type_popular(self): """ Returns a popular mime-type. >>> fake.mime_type_popular() text/html :return: content-type/mime-type :rtype: str """ return self.random_element(self.popular_mime_types.keys())
c69e18a4dd324b8d32fb3d5c74bd011c7fa081d6
waybackpack/session.py
waybackpack/session.py
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
Add stream=True to requests params
Add stream=True to requests params
Python
mit
jsvine/waybackpack
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res Add stream=True to requests params
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
<commit_before>from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res <commit_msg>Add stream=True to requests params<commit_after>
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res Add stream=True to requests paramsfrom .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
<commit_before>from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res <commit_msg>Add stream=True to requests params<commit_after>from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
6cedfb17afbb3a869336d23cefdfcae1a65754f9
tests/test_check.py
tests/test_check.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot import check class TestIsBinary(unittest.TestCase): def setUp(self): pass def test_is_binary(self): pass def tearDown(self): pass if __name__ == '__main__': unittest.main()
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot.check import is_binary class TestIsBinary(unittest.TestCase): def test_css(self): self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css')) def test_json(self): self.assertFalse(is_binary('tests/files/cookiecutter.json')) def test_eot(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot')) def test_otf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf')) def test_svg(self): self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg')) def test_ttf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf')) def test_woff(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff')) def test_txt(self): self.assertFalse(is_binary('tests/files/robots.txt')) if __name__ == '__main__': unittest.main()
Add lots of miserably failing tests.
Add lots of miserably failing tests.
Python
bsd-3-clause
pombredanne/binaryornot,0k/binaryornot,pombredanne/binaryornot,pombredanne/binaryornot,audreyr/binaryornot,audreyr/binaryornot,hackebrot/binaryornot,hackebrot/binaryornot,0k/binaryornot,audreyr/binaryornot,hackebrot/binaryornot
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot import check class TestIsBinary(unittest.TestCase): def setUp(self): pass def test_is_binary(self): pass def tearDown(self): pass if __name__ == '__main__': unittest.main()Add lots of miserably failing tests.
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot.check import is_binary class TestIsBinary(unittest.TestCase): def test_css(self): self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css')) def test_json(self): self.assertFalse(is_binary('tests/files/cookiecutter.json')) def test_eot(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot')) def test_otf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf')) def test_svg(self): self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg')) def test_ttf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf')) def test_woff(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff')) def test_txt(self): self.assertFalse(is_binary('tests/files/robots.txt')) if __name__ == '__main__': unittest.main()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot import check class TestIsBinary(unittest.TestCase): def setUp(self): pass def test_is_binary(self): pass def tearDown(self): pass if __name__ == '__main__': unittest.main()<commit_msg>Add lots of miserably failing tests.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot.check import is_binary class TestIsBinary(unittest.TestCase): def test_css(self): self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css')) def test_json(self): self.assertFalse(is_binary('tests/files/cookiecutter.json')) def test_eot(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot')) def test_otf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf')) def test_svg(self): self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg')) def test_ttf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf')) def test_woff(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff')) def test_txt(self): self.assertFalse(is_binary('tests/files/robots.txt')) if __name__ == '__main__': unittest.main()
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot import check class TestIsBinary(unittest.TestCase): def setUp(self): pass def test_is_binary(self): pass def tearDown(self): pass if __name__ == '__main__': unittest.main()Add lots of miserably failing tests.#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot.check import is_binary class TestIsBinary(unittest.TestCase): def test_css(self): self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css')) def test_json(self): self.assertFalse(is_binary('tests/files/cookiecutter.json')) def test_eot(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot')) def test_otf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf')) def test_svg(self): self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg')) def test_ttf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf')) def test_woff(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff')) def test_txt(self): self.assertFalse(is_binary('tests/files/robots.txt')) if __name__ == '__main__': unittest.main()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot import check class TestIsBinary(unittest.TestCase): def setUp(self): pass def test_is_binary(self): pass def tearDown(self): pass if __name__ == '__main__': unittest.main()<commit_msg>Add lots of miserably failing tests.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_binaryornot ------------------ Tests for `binaryornot` module. """ import unittest from binaryornot.check import is_binary class TestIsBinary(unittest.TestCase): def test_css(self): self.assertFalse(is_binary('tests/files/bootstrap-glyphicons.css')) def test_json(self): self.assertFalse(is_binary('tests/files/cookiecutter.json')) def test_eot(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.eot')) def test_otf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.otf')) def test_svg(self): self.assertFalse(is_binary('tests/files/glyphiconshalflings-regular.svg')) def test_ttf(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.ttf')) def test_woff(self): self.assertTrue(is_binary('tests/files/glyphiconshalflings-regular.woff')) def test_txt(self): self.assertFalse(is_binary('tests/files/robots.txt')) if __name__ == '__main__': unittest.main()
d59247df00a5899c0f4933df42a9d369db1931ab
tests/helpers.py
tests/helpers.py
import virtualbox def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder )
import unittest import virtualbox class VirtualboxTestCase(unittest.TestCase): def setUp(self): self.vbox = virtualbox.vb_get_manager() def assertMachineExists(self, name, msg=None): try: self.vbox.findMachine(name) except Exception as e: if msg: self.fail(msg) else: self.fail(e.message) def assertMachineDoesNotExist(self, name, msg=None): self.assertRaisesRegexp(Exception, "Could not find a registered machine", self.vbox.findMachine, name) def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder )
Create a basic VirtualBoxTestCase with helper assertions
Create a basic VirtualBoxTestCase with helper assertions
Python
apache-2.0
saltstack/salt,saltstack/salt,LoveIsGrief/saltcloud-virtualbox-provider,saltstack/salt,saltstack/salt,saltstack/salt
import virtualbox def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder ) Create a basic VirtualBoxTestCase with helper assertions
import unittest import virtualbox class VirtualboxTestCase(unittest.TestCase): def setUp(self): self.vbox = virtualbox.vb_get_manager() def assertMachineExists(self, name, msg=None): try: self.vbox.findMachine(name) except Exception as e: if msg: self.fail(msg) else: self.fail(e.message) def assertMachineDoesNotExist(self, name, msg=None): self.assertRaisesRegexp(Exception, "Could not find a registered machine", self.vbox.findMachine, name) def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder )
<commit_before>import virtualbox def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder ) <commit_msg>Create a basic VirtualBoxTestCase with helper assertions<commit_after>
import unittest import virtualbox class VirtualboxTestCase(unittest.TestCase): def setUp(self): self.vbox = virtualbox.vb_get_manager() def assertMachineExists(self, name, msg=None): try: self.vbox.findMachine(name) except Exception as e: if msg: self.fail(msg) else: self.fail(e.message) def assertMachineDoesNotExist(self, name, msg=None): self.assertRaisesRegexp(Exception, "Could not find a registered machine", self.vbox.findMachine, name) def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder )
import virtualbox def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder ) Create a basic VirtualBoxTestCase with helper assertionsimport unittest import virtualbox class VirtualboxTestCase(unittest.TestCase): def setUp(self): self.vbox = virtualbox.vb_get_manager() def assertMachineExists(self, name, msg=None): try: self.vbox.findMachine(name) except Exception as e: if msg: self.fail(msg) else: self.fail(e.message) def assertMachineDoesNotExist(self, name, msg=None): self.assertRaisesRegexp(Exception, "Could not find a registered machine", self.vbox.findMachine, name) def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder )
<commit_before>import virtualbox def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder ) <commit_msg>Create a basic VirtualBoxTestCase with helper assertions<commit_after>import unittest import virtualbox class VirtualboxTestCase(unittest.TestCase): def setUp(self): self.vbox = virtualbox.vb_get_manager() def assertMachineExists(self, name, msg=None): try: self.vbox.findMachine(name) except Exception as e: if msg: self.fail(msg) else: self.fail(e.message) def assertMachineDoesNotExist(self, name, msg=None): self.assertRaisesRegexp(Exception, "Could not find a registered machine", self.vbox.findMachine, name) def list_machines(): vbox = virtualbox.vb_get_manager() for machine in vbox.getArray(vbox, "Machines"): print "Machine '%s' logs in '%s'" % ( machine.name, machine.logFolder )
c203f53257e4ac873f3361859158024a45b7fb56
test/test_object.py
test/test_object.py
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): arena = lib.qcgc_arena_create() o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) if __name__ == "__main__": unittest.main()
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): o = self.allocate(16) arena = lib.qcgc_arena_addr(ffi.cast("cell_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) lib.qcgc_state.state = lib.GC_MARK o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_arena_set_blocktype(ffi.cast("cell_t *", o), lib.BLOCK_BLACK) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) self.assertEqual(lib.arena_gray_stack(arena).index, 1) self.assertEqual(lib.arena_gray_stack(arena).items[0], o) if __name__ == "__main__": unittest.main()
Add additional test for black object barrier
Add additional test for black object barrier
Python
mit
ntruessel/qcgc,ntruessel/qcgc,ntruessel/qcgc
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): arena = lib.qcgc_arena_create() o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) if __name__ == "__main__": unittest.main() Add additional test for black object barrier
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): o = self.allocate(16) arena = lib.qcgc_arena_addr(ffi.cast("cell_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) lib.qcgc_state.state = lib.GC_MARK o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_arena_set_blocktype(ffi.cast("cell_t *", o), lib.BLOCK_BLACK) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) self.assertEqual(lib.arena_gray_stack(arena).index, 1) self.assertEqual(lib.arena_gray_stack(arena).items[0], o) if __name__ == "__main__": unittest.main()
<commit_before>from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): arena = lib.qcgc_arena_create() o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) if __name__ == "__main__": unittest.main() <commit_msg>Add additional test for black object barrier<commit_after>
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): o = self.allocate(16) arena = lib.qcgc_arena_addr(ffi.cast("cell_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) lib.qcgc_state.state = lib.GC_MARK o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_arena_set_blocktype(ffi.cast("cell_t *", o), lib.BLOCK_BLACK) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) self.assertEqual(lib.arena_gray_stack(arena).index, 1) self.assertEqual(lib.arena_gray_stack(arena).items[0], o) if __name__ == "__main__": unittest.main()
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): arena = lib.qcgc_arena_create() o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) if __name__ == "__main__": unittest.main() Add additional test for black object barrierfrom support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): o = self.allocate(16) arena = lib.qcgc_arena_addr(ffi.cast("cell_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) lib.qcgc_state.state = lib.GC_MARK o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_arena_set_blocktype(ffi.cast("cell_t *", o), lib.BLOCK_BLACK) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) self.assertEqual(lib.arena_gray_stack(arena).index, 1) self.assertEqual(lib.arena_gray_stack(arena).items[0], o) if __name__ == "__main__": unittest.main()
<commit_before>from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): arena = lib.qcgc_arena_create() o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) if __name__ == "__main__": unittest.main() <commit_msg>Add additional test for black object barrier<commit_after>from support import lib,ffi from qcgc_test import QCGCTest import unittest class ObjectTestCase(QCGCTest): def test_write_barrier(self): o = self.allocate(16) arena = lib.qcgc_arena_addr(ffi.cast("cell_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) lib.qcgc_state.state = lib.GC_MARK o = self.allocate(16) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, 0) lib.qcgc_arena_set_blocktype(ffi.cast("cell_t *", o), lib.BLOCK_BLACK) lib.qcgc_write(ffi.cast("object_t *", o)) self.assertEqual(ffi.cast("object_t *", o).flags & lib.QCGC_GRAY_FLAG, lib.QCGC_GRAY_FLAG) self.assertEqual(lib.arena_gray_stack(arena).index, 1) self.assertEqual(lib.arena_gray_stack(arena).items[0], o) if __name__ == "__main__": unittest.main()
7ef436dc909fdcb3ba917faddda585f8619bc5ed
testing/runtests.py
testing/runtests.py
# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2)
# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": import django if django.VERSION[:2] >= (1, 7): django.setup() args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2)
Fix running the tests against Django 1.7.
Fix running the tests against Django 1.7.
Python
bsd-3-clause
Natgeoed/djorm-ext-pgarray,natgeo/djorm-ext-pgarray,natgeo/djorm-ext-pgarray,niwinz/djorm-pgarray,niwinz/djorm-pgarray,Natgeoed/djorm-ext-pgarray
# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2) Fix running the tests against Django 1.7.
# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": import django if django.VERSION[:2] >= (1, 7): django.setup() args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2)
<commit_before># -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2) <commit_msg>Fix running the tests against Django 1.7.<commit_after>
# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": import django if django.VERSION[:2] >= (1, 7): django.setup() args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2)
# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2) Fix running the tests against Django 1.7.# -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": import django if django.VERSION[:2] >= (1, 7): django.setup() args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2)
<commit_before># -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2) <commit_msg>Fix running the tests against Django 1.7.<commit_after># -*- coding: utf-8 -*- import os, sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import call_command if __name__ == "__main__": import django if django.VERSION[:2] >= (1, 7): django.setup() args = sys.argv[1:] if len(args) == 0: args.append("pg_array_fields") call_command("test", *args, verbosity=2)
c6d7f2b1214e86f09431ab1d8e5c312f7a87081d
pttrack/views.py
pttrack/views.py
from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): # process the data in form.cleaned_data as required # ... # redirect to a new URL: return HttpResponseRedirect('/') # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form})
from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect from django.core.urlresolvers import reverse from . import models as mymodels # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): p = mymodels.Patient(**form.cleaned_data) p.save() # redirect to a new URL: return HttpResponseRedirect(reverse(patient, args=(p.id,))) # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form})
Set up redirect at db saves for new patients.
Set up redirect at db saves for new patients.
Python
mit
SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools
from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): # process the data in form.cleaned_data as required # ... # redirect to a new URL: return HttpResponseRedirect('/') # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form}) Set up redirect at db saves for new patients.
from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect from django.core.urlresolvers import reverse from . import models as mymodels # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): p = mymodels.Patient(**form.cleaned_data) p.save() # redirect to a new URL: return HttpResponseRedirect(reverse(patient, args=(p.id,))) # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form})
<commit_before>from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): # process the data in form.cleaned_data as required # ... # redirect to a new URL: return HttpResponseRedirect('/') # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form}) <commit_msg>Set up redirect at db saves for new patients.<commit_after>
from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect from django.core.urlresolvers import reverse from . import models as mymodels # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): p = mymodels.Patient(**form.cleaned_data) p.save() # redirect to a new URL: return HttpResponseRedirect(reverse(patient, args=(p.id,))) # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form})
from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): # process the data in form.cleaned_data as required # ... # redirect to a new URL: return HttpResponseRedirect('/') # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form}) Set up redirect at db saves for new patients.from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect from django.core.urlresolvers import reverse from . import models as mymodels # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): p = mymodels.Patient(**form.cleaned_data) p.save() # redirect to a new URL: return HttpResponseRedirect(reverse(patient, args=(p.id,))) # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form})
<commit_before>from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): # process the data in form.cleaned_data as required # ... # redirect to a new URL: return HttpResponseRedirect('/') # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form}) <commit_msg>Set up redirect at db saves for new patients.<commit_after>from django.shortcuts import render from django.http import HttpResponse, HttpResponseRedirect from django.core.urlresolvers import reverse from . import models as mymodels # Create your views here.from django.http import HttpResponse def index(request): return HttpResponse("Hello, world. You're at the BIG TABLE.") def clindate(request, clindate): (year, month, day) = clindate.split("-") return HttpResponse("Clinic date %s" % year+" "+month+" "+day) def patient(request, pt_uuid): return HttpResponse("You're looking at patient %s" % pt_uuid) def intake(request): from . import forms as myforms # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = myforms.PatientForm(request.POST) # check whether it's valid: if form.is_valid(): p = mymodels.Patient(**form.cleaned_data) p.save() # redirect to a new URL: return HttpResponseRedirect(reverse(patient, args=(p.id,))) # if a GET (or any other method) we'll create a blank form else: form = myforms.PatientForm() return render(request, 'pttrack/intake.html', {'form': form})
3050971ec01d14e12d276bc47193abeac0364862
todoman/__init__.py
todoman/__init__.py
from setuptools_scm import get_version __version__ = get_version(version_scheme='post-release')
from setuptools_scm import get_version import pkg_resources try: __version__ = get_version(version_scheme='post-release') except LookupError: __version__ = pkg_resources.get_distribution('todoman').version
Fix determining version outside a git repository
Fix determining version outside a git repository
Python
isc
Sakshisaraswat/todoman,hobarrera/todoman,asalminen/todoman,pimutils/todoman,rimshaakhan/todoman,AnubhaAgrawal/todoman
from setuptools_scm import get_version __version__ = get_version(version_scheme='post-release') Fix determining version outside a git repository
from setuptools_scm import get_version import pkg_resources try: __version__ = get_version(version_scheme='post-release') except LookupError: __version__ = pkg_resources.get_distribution('todoman').version
<commit_before>from setuptools_scm import get_version __version__ = get_version(version_scheme='post-release') <commit_msg>Fix determining version outside a git repository<commit_after>
from setuptools_scm import get_version import pkg_resources try: __version__ = get_version(version_scheme='post-release') except LookupError: __version__ = pkg_resources.get_distribution('todoman').version
from setuptools_scm import get_version __version__ = get_version(version_scheme='post-release') Fix determining version outside a git repositoryfrom setuptools_scm import get_version import pkg_resources try: __version__ = get_version(version_scheme='post-release') except LookupError: __version__ = pkg_resources.get_distribution('todoman').version
<commit_before>from setuptools_scm import get_version __version__ = get_version(version_scheme='post-release') <commit_msg>Fix determining version outside a git repository<commit_after>from setuptools_scm import get_version import pkg_resources try: __version__ = get_version(version_scheme='post-release') except LookupError: __version__ = pkg_resources.get_distribution('todoman').version
9d35e9f59ca6e15ca7484a686235c12b64858861
setup.py
setup.py
from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@release/marvin#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, )
from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@1.15.0#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, )
Update dict pin in prep for release
chore(pins): Update dict pin in prep for release - Update dict pin in prep for release
Python
apache-2.0
NCI-GDC/gdcdatamodel,NCI-GDC/gdcdatamodel
from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@release/marvin#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, ) chore(pins): Update dict pin in prep for release - Update dict pin in prep for release
from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@1.15.0#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, )
<commit_before>from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@release/marvin#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, ) <commit_msg>chore(pins): Update dict pin in prep for release - Update dict pin in prep for release<commit_after>
from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@1.15.0#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, )
from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@release/marvin#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, ) chore(pins): Update dict pin in prep for release - Update dict pin in prep for releasefrom setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@1.15.0#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, )
<commit_before>from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@release/marvin#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, ) <commit_msg>chore(pins): Update dict pin in prep for release - Update dict pin in prep for release<commit_after>from setuptools import setup, find_packages setup( name='gdcdatamodel', packages=find_packages(), install_requires=[ 'pytz==2016.4', 'graphviz==0.4.2', 'jsonschema==2.5.1', 'python-dateutil==2.4.2', 'psqlgraph', 'gdcdictionary', 'dictionaryutils>=2.0.0,<3.0.0', 'cdisutils', ], package_data={ "gdcdatamodel": [ "xml_mappings/*.yaml", ] }, dependency_links=[ 'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils', 'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph', 'git+https://github.com/NCI-GDC/gdcdictionary.git@1.15.0#egg=gdcdictionary', ], entry_points={ 'console_scripts': [ 'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main' ] }, )
5698faa6d6460c6e8008279c654fa448537720c2
setup.py
setup.py
#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.15", ], test_suite="abel.tests.run_cli" )
#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.14", ], test_suite="abel.tests.run_cli" )
Set minimal scipy dependency to 0.14
Set minimal scipy dependency to 0.14
Python
mit
PyAbel/PyAbel,huletlab/PyAbel,stggh/PyAbel,rth/PyAbel,DhrubajyotiDas/PyAbel
#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.15", ], test_suite="abel.tests.run_cli" ) Set minimal scipy dependency to 0.14
#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.14", ], test_suite="abel.tests.run_cli" )
<commit_before>#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.15", ], test_suite="abel.tests.run_cli" ) <commit_msg>Set minimal scipy dependency to 0.14<commit_after>
#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.14", ], test_suite="abel.tests.run_cli" )
#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.15", ], test_suite="abel.tests.run_cli" ) Set minimal scipy dependency to 0.14#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.14", ], test_suite="abel.tests.run_cli" )
<commit_before>#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.15", ], test_suite="abel.tests.run_cli" ) <commit_msg>Set minimal scipy dependency to 0.14<commit_after>#!/usr/bin/python from setuptools import setup, find_packages setup(name='PyAbel', version='0.5.0', description='A Python package for inverse Abel transforms', author='Dan Hickstein', packages=find_packages(), package_data={'abel': ['tests/data/*' ]}, install_requires=[ "numpy >= 1.6", "setuptools >= 16.0", "scipy >= 0.14", ], test_suite="abel.tests.run_cli" )
ddbd19d317940f61f724309b192dce5ed49f4cb0
setup.py
setup.py
from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'sqlite3', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } )
from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } )
Remove sqlite3 package. It's available by default in most Python distributions.
Remove sqlite3 package. It's available by default in most Python distributions.
Python
mit
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'sqlite3', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } ) Remove sqlite3 package. It's available by default in most Python distributions.
from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } )
<commit_before>from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'sqlite3', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } ) <commit_msg>Remove sqlite3 package. It's available by default in most Python distributions.<commit_after>
from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } )
from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'sqlite3', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } ) Remove sqlite3 package. It's available by default in most Python distributions.from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } )
<commit_before>from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'sqlite3', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } ) <commit_msg>Remove sqlite3 package. It's available by default in most Python distributions.<commit_after>from setuptools import setup, find_packages setup( name='weaveserver', version='0.8', author='Srivatsan Iyer', author_email='supersaiyanmode.rox@gmail.com', packages=find_packages(), license='MIT', description='Library to interact with Weave Server', long_description=open('README.md').read(), install_requires=[ 'weavelib', 'eventlet!=0.22', 'bottle', 'GitPython', 'redis', 'appdirs', 'peewee', ], entry_points={ 'console_scripts': [ 'weave-launch = app:handle_launch', 'weave-main = app:handle_main' ] } )
690cc2cb654a5772d73ecc83932358b4a6091921
setup.py
setup.py
import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=[ os.path.realpath(os.path.join(__file__, "..", "src", "wrapt", "_wrappers.c"))], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions )
import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=["src/wrapt/_wrappers.c"], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions )
Revert to relative path for sources.
Revert to relative path for sources.
Python
bsd-2-clause
GrahamDumpleton/wrapt,GrahamDumpleton/wrapt
import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=[ os.path.realpath(os.path.join(__file__, "..", "src", "wrapt", "_wrappers.c"))], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions ) Revert to relative path for sources.
import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=["src/wrapt/_wrappers.c"], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions )
<commit_before>import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=[ os.path.realpath(os.path.join(__file__, "..", "src", "wrapt", "_wrappers.c"))], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions ) <commit_msg>Revert to relative path for sources.<commit_after>
import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=["src/wrapt/_wrappers.c"], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions )
import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=[ os.path.realpath(os.path.join(__file__, "..", "src", "wrapt", "_wrappers.c"))], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions ) Revert to relative path for sources.import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=["src/wrapt/_wrappers.c"], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions )
<commit_before>import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=[ os.path.realpath(os.path.join(__file__, "..", "src", "wrapt", "_wrappers.c"))], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions ) <commit_msg>Revert to relative path for sources.<commit_after>import os import platform import setuptools # # --- Detect if extensions should be disabled ------------------------------ wrapt_env = os.environ.get('WRAPT_INSTALL_EXTENSIONS') if wrapt_env is None: wrapt_env = os.environ.get('WRAPT_EXTENSIONS') if wrapt_env is not None: disable_extensions = wrapt_env.lower() == 'false' force_extensions = wrapt_env.lower() == 'true' else: disable_extensions = False force_extensions = False if platform.python_implementation() != "CPython": disable_extensions = True # --- C extension ------------------------------------------------------------ extensions = [ setuptools.Extension( "wrapt._wrappers", sources=["src/wrapt/_wrappers.c"], optional=not force_extensions, ) ] # --- Setup ------------------------------------------------------------------ setuptools.setup( ext_modules=[] if disable_extensions else extensions )
06be7bebcc72d2ae77a9004b2a5cc0043df0e9a6
setup.py
setup.py
from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
Use subprocess32 under Python 2 for subprocess fixes.
Use subprocess32 under Python 2 for subprocess fixes.
Python
mit
regebro/spiny
from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' ) Use subprocess32 under Python 2 for subprocess fixes.
from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
<commit_before>from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' ) <commit_msg>Use subprocess32 under Python 2 for subprocess fixes.<commit_after>
from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' ) Use subprocess32 under Python 2 for subprocess fixes.from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
<commit_before>from setuptools import setup, find_packages import os.path with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=[], entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' ) <commit_msg>Use subprocess32 under Python 2 for subprocess fixes.<commit_after>from setuptools import setup, find_packages import os.path import sys if sys.version_info < (3,): install_requires = ['subprocess32'] else: install_requires = [] with open("README.rst") as infile: readme = infile.read() with open(os.path.join("docs", "CHANGES.txt")) as infile: changes = infile.read() long_desc = readme + '\n\n' + changes setup( name='spiny', version='0.3.dev0', description='''Spiny will run your Python tests under multiple versions of Python''', long_description=long_desc, keywords=['development', 'tools', 'testing'], classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Testing", ], packages=find_packages(), include_package_data=True, zip_safe=True, author='Lennart Regebro', author_email='regebro@gmail.com', url="https://github.com/regebro/spiny/", license='MIT', install_requires=install_requires, entry_points={ 'console_scripts': [ 'spiny = spiny.main:main', ] }, test_suite='tests' )
a29b144d0bf4e7c83fe1b63e6128bb327fe6fa89
setup.py
setup.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC

import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="python-fpga-interchange",
    version="0.0.3",
    author="SymbiFlow Authors",
    author_email="symbiflow@lists.librecores.org",
    description="Python library for reading and writing FPGA interchange files",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/SymbiFlow/python-fpga-interchange",
    python_requires=">=3.7",
    packages=setuptools.find_packages(),
    include_package_data=True,
    install_requires=["pycapnp", "python-sat"],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: ISC License",
        "Operating System :: OS Independent",
    ],
)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC

import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="python-fpga-interchange",
    version="0.0.3",
    author="SymbiFlow Authors",
    author_email="symbiflow@lists.librecores.org",
    description="Python library for reading and writing FPGA interchange files",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/SymbiFlow/python-fpga-interchange",
    python_requires=">=3.7",
    packages=setuptools.find_packages(),
    include_package_data=True,
    install_requires=["pycapnp", "python-sat"],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: ISC License",
        "Operating System :: OS Independent",
    ],
    entry_points={
        'console_scripts': [
            'fpga_inter_add_prim_lib=fpga_interchange.add_prim_lib:main',
            'fpga_inter_convert=fpga_interchange.convert:main',
            'fpga_inter_nextpnr_emit=fpga_interchange.nextpnr_emit:main',
            'fpga_inter_patch=fpga_interchange.patch:main',
            'fpga_inter_yosys_json=fpga_interchange.yosys_json:main',
        ],
    })
Add entry points for command line tools.
Add entry points for command line tools. Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
Python
isc
SymbiFlow/python-fpga-interchange,SymbiFlow/python-fpga-interchange
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], ) Add entry points for command line tools. Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], entry_points={ 'console_scripts': [ 'fpga_inter_add_prim_lib=fpga_interchange.add_prim_lib:main', 'fpga_inter_convert=fpga_interchange.convert:main', 'fpga_inter_nextpnr_emit=fpga_interchange.nextpnr_emit:main', 'fpga_inter_patch=fpga_interchange.patch:main', 'fpga_inter_yosys_json=fpga_interchange.yosys_json:main', ], })
<commit_before>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], ) <commit_msg>Add entry points for command line tools. Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com><commit_after>
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], entry_points={ 'console_scripts': [ 'fpga_inter_add_prim_lib=fpga_interchange.add_prim_lib:main', 'fpga_inter_convert=fpga_interchange.convert:main', 'fpga_inter_nextpnr_emit=fpga_interchange.nextpnr_emit:main', 'fpga_inter_patch=fpga_interchange.patch:main', 'fpga_inter_yosys_json=fpga_interchange.yosys_json:main', ], })
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], ) Add entry points for command line tools. Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], entry_points={ 'console_scripts': [ 'fpga_inter_add_prim_lib=fpga_interchange.add_prim_lib:main', 'fpga_inter_convert=fpga_interchange.convert:main', 'fpga_inter_nextpnr_emit=fpga_interchange.nextpnr_emit:main', 'fpga_inter_patch=fpga_interchange.patch:main', 'fpga_inter_yosys_json=fpga_interchange.yosys_json:main', ], })
<commit_before>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], ) <commit_msg>Add entry points for command line tools. Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com><commit_after>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2020 The SymbiFlow Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="python-fpga-interchange", version="0.0.3", author="SymbiFlow Authors", author_email="symbiflow@lists.librecores.org", description="Python library for reading and writing FPGA interchange files", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/SymbiFlow/python-fpga-interchange", python_requires=">=3.7", packages=setuptools.find_packages(), include_package_data=True, install_requires=["pycapnp", "python-sat"], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: ISC License", "Operating System :: OS Independent", ], entry_points={ 'console_scripts': [ 'fpga_inter_add_prim_lib=fpga_interchange.add_prim_lib:main', 'fpga_inter_convert=fpga_interchange.convert:main', 'fpga_inter_nextpnr_emit=fpga_interchange.nextpnr_emit:main', 'fpga_inter_patch=fpga_interchange.patch:main', 'fpga_inter_yosys_json=fpga_interchange.yosys_json:main', ], })
765807cca72cc82c889cf352fefc24c5bb00fe06
setup.py
setup.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# Std lib imports
import glob
import os

# Non-std lib imports
from setuptools import Extension, find_packages, setup

# Define how to build the extension module.
# All other data is in the setup.cfg file.
setup(
    name="fastnumbers",
    version="3.2.1",
    python_requires=">=3.6",
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    package_data={"fastnumbers": ["py.typed", "*.pyi"]},
    zip_safe=False,
    ext_modules=[
        Extension(
            "fastnumbers.fastnumbers",
            sorted(glob.glob("src/*.c")),
            include_dirs=[os.path.abspath(os.path.join("include"))],
            extra_compile_args=[],
        )
    ],
)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# Std lib imports
import glob
import os

# Non-std lib imports
from setuptools import Extension, find_packages, setup

# Define how to build the extension module.
# All other data is in the setup.cfg file.
setup(
    name="fastnumbers",
    version="3.2.1",
    python_requires=">=3.6",
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    package_data={"fastnumbers": ["py.typed", "*.pyi"]},
    zip_safe=False,
    ext_modules=[
        Extension(
            "fastnumbers.fastnumbers",
            sorted(glob.glob("src/*.c")),
            include_dirs=[os.path.abspath(os.path.join("include"))],
            extra_compile_args=[],
            extra_link_args=["-lm"],
        )
    ],
)
Add -lm as floor() is used
Add -lm as floor() is used This fixes build on armv7hl: /usr/bin/ld: build/temp.linux-armv8l-3.10/src/numbers.o: in function `_PyFloat_is_Intlike': /home/iurt/rpmbuild/BUILD/fastnumbers-3.2.1/src/numbers.c:69: undefined reference to `floor'
Python
mit
SethMMorton/fastnumbers,SethMMorton/fastnumbers,SethMMorton/fastnumbers
#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], ) ], ) Add -lm as floor() is used This fixes build on armv7hl: /usr/bin/ld: build/temp.linux-armv8l-3.10/src/numbers.o: in function `_PyFloat_is_Intlike': /home/iurt/rpmbuild/BUILD/fastnumbers-3.2.1/src/numbers.c:69: undefined reference to `floor'
#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], extra_link_args=["-lm"], ) ], )
<commit_before>#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], ) ], ) <commit_msg>Add -lm as floor() is used This fixes build on armv7hl: /usr/bin/ld: build/temp.linux-armv8l-3.10/src/numbers.o: in function `_PyFloat_is_Intlike': /home/iurt/rpmbuild/BUILD/fastnumbers-3.2.1/src/numbers.c:69: undefined reference to `floor'<commit_after>
#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], extra_link_args=["-lm"], ) ], )
#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], ) ], ) Add -lm as floor() is used This fixes build on armv7hl: /usr/bin/ld: build/temp.linux-armv8l-3.10/src/numbers.o: in function `_PyFloat_is_Intlike': /home/iurt/rpmbuild/BUILD/fastnumbers-3.2.1/src/numbers.c:69: undefined reference to `floor'#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], extra_link_args=["-lm"], ) ], )
<commit_before>#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], ) ], ) <commit_msg>Add -lm as floor() is used This fixes build on armv7hl: /usr/bin/ld: build/temp.linux-armv8l-3.10/src/numbers.o: in function `_PyFloat_is_Intlike': /home/iurt/rpmbuild/BUILD/fastnumbers-3.2.1/src/numbers.c:69: undefined reference to `floor'<commit_after>#! /usr/bin/env python # -*- coding: utf-8 -*- # Std lib imports import glob import os # Non-std lib imports from setuptools import Extension, find_packages, setup # Define how to build the extension module. # All other data is in the setup.cfg file. setup( name="fastnumbers", version="3.2.1", python_requires=">=3.6", packages=find_packages(where="src"), package_dir={"": "src"}, package_data={"fastnumbers": ["py.typed", "*.pyi"]}, zip_safe=False, ext_modules=[ Extension( "fastnumbers.fastnumbers", sorted(glob.glob("src/*.c")), include_dirs=[os.path.abspath(os.path.join("include"))], extra_compile_args=[], extra_link_args=["-lm"], ) ], )
a7e3c6f63c9f98cd316c0729cf2689cf863dc81f
setup.py
setup.py
from setuptools import setup, find_packages

PACKAGE = 'AdaptiveArtifacts'
VERSION = '0.2'

setup(name=PACKAGE,
      version=VERSION,
      author='Filipe Correia',
      author_email='filipe dot correia at fe dot up dot pt',
      long_description="""
      This Trac plugin allows to create information following an arbitrary structure,
      that can emerge during a project's lifetime, rather than having to be established
      at installation/configuration time.
      """,
      packages=find_packages(exclude=['*.tests']),
      entry_points={
          'trac.plugins': [
              '%s = AdaptiveArtifacts' % PACKAGE,
              '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE,
          ]
      },
      package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*.js', 'htdocs/images/*.jpg', 'templates/*.html']},
      )
from setuptools import setup, find_packages

PACKAGE = 'AdaptiveArtifacts'
VERSION = '0.2'

setup(name=PACKAGE,
      version=VERSION,
      author='Filipe Correia',
      author_email='filipe dot correia at fe dot up dot pt',
      long_description="""
      This Trac plugin allows to create information following an arbitrary structure,
      that can emerge during a project's lifetime, rather than having to be established
      at installation/configuration time.
      """,
      packages=find_packages(exclude=['*.tests']),
      entry_points={
          'trac.plugins': [
              '%s = AdaptiveArtifacts' % PACKAGE,
              '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE,
          ]
      },
      package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*', 'htdocs/images/*.jpg', 'templates/*.html']},
      )
Fix that will hopefully include all the contents (files and directories) of htdocs/js/
Fix that will hopefully include all the contents (files and directories) of htdocs/js/
Python
bsd-3-clause
filipefigcorreia/TracAdaptiveSoftwareArtifacts,filipefigcorreia/TracAdaptiveSoftwareArtifacts,filipefigcorreia/TracAdaptiveSoftwareArtifacts
from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*.js', 'htdocs/images/*.jpg', 'templates/*.html']}, ) Fix that will hopefully include all the contents (files and directories) of htdocs/js/
from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*', 'htdocs/images/*.jpg', 'templates/*.html']}, )
<commit_before>from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*.js', 'htdocs/images/*.jpg', 'templates/*.html']}, ) <commit_msg>Fix that will hopefully include all the contents (files and directories) of htdocs/js/<commit_after>
from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*', 'htdocs/images/*.jpg', 'templates/*.html']}, )
from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*.js', 'htdocs/images/*.jpg', 'templates/*.html']}, ) Fix that will hopefully include all the contents (files and directories) of htdocs/js/from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*', 'htdocs/images/*.jpg', 'templates/*.html']}, )
<commit_before>from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*.js', 'htdocs/images/*.jpg', 'templates/*.html']}, ) <commit_msg>Fix that will hopefully include all the contents (files and directories) of htdocs/js/<commit_after>from setuptools import setup, find_packages PACKAGE = 'AdaptiveArtifacts' VERSION = '0.2' setup(name=PACKAGE, version=VERSION, author='Filipe Correia', author_email='filipe dot correia at fe dot up dot pt', long_description=""" This Trac plugin allows to create information following an arbitrary structure, that can emerge during a project's lifetime, rather than having to be established at installation/configuration time. """, packages=find_packages(exclude=['*.tests']), entry_points={ 'trac.plugins': [ '%s = AdaptiveArtifacts' % PACKAGE, '%s.setup = AdaptiveArtifacts.persistence.db' % PACKAGE, ] }, package_data={'AdaptiveArtifacts': ['htdocs/css/*.css', 'htdocs/js/*', 'htdocs/images/*.jpg', 'templates/*.html']}, )
86d1bb54b3429b0e1c91e2f387c74e7858e31d72
setup.py
setup.py
#!/usr/bin/env python
from setuptools import setup, find_packages


def readme():
    with open('README.rst') as f:
        return f.read()


setup(
    name='django-mailer-server-backend',
    version='0.1.3',
    description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)",
    long_description=readme(),
    author='Serafeim Papastefanos',
    author_email='spapas@gmail.com',
    license='MIT',
    url='https://github.com/spapas/django-mailer-server-backend',
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(exclude=['tests.*', 'tests',]),
    install_requires=['Django >= 1.11', 'six'],
    classifiers=[
        'Environment :: Web Environment',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Framework :: Django',
        'Framework :: Django :: 2.0',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries',
    ],
)
#!/usr/bin/env python
from setuptools import setup, find_packages


def readme():
    with open('README.rst') as f:
        return f.read()


setup(
    name='django-mailer-server-backend',
    version='0.1.4',
    description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)",
    long_description=readme(),
    author='Serafeim Papastefanos',
    author_email='spapas@gmail.com',
    license='MIT',
    url='https://github.com/spapas/django-mailer-server-backend',
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(exclude=['tests.*', 'tests',]),
    install_requires=['Django >= 1.11', 'six'],
    classifiers=[
        'Environment :: Web Environment',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Framework :: Django',
        'Framework :: Django :: 2.0',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries',
    ],
)
Upgrade ver a little more
Upgrade ver a little more
Python
mit
spapas/django-mailer-server-backend
#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.3', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], ) Upgrade ver a little more
#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.4', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.3', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], ) <commit_msg>Upgrade ver a little more<commit_after>
#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.4', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], )
#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.3', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], ) Upgrade ver a little more#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.4', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.3', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], ) <commit_msg>Upgrade ver a little more<commit_after>#!/usr/bin/env python from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() setup( name='django-mailer-server-backend', version='0.1.4', description="A django mail backend for mailer server (https://github.com/spapas/mailer_server)", long_description=readme(), author='Serafeim Papastefanos', author_email='spapas@gmail.com', license='MIT', url='https://github.com/spapas/django-mailer-server-backend', zip_safe=False, include_package_data=True, packages=find_packages(exclude=['tests.*', 'tests',]), install_requires=['Django >= 1.11', 'six'], classifiers=[ 'Environment :: Web Environment', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Framework :: Django', 'Framework :: Django :: 2.0', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], )
d376d85d246092043f5eb9557e9e5b2fbbaffa09
setup.py
setup.py
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import setuptools

INSTALL_REQUIRES = [
    'absl-py',
    'numpy',
    'jax',
]

setuptools.setup(
    name='neural-tangents',
    version='0.0.0',
    license='Apache 2.0',
    author='Google',
    author_email='neural-tangents-dev@google.com',
    install_requires=INSTALL_REQUIRES,
    url='https://github.com/google/neural-tangents',
    packages=setuptools.find_packages()
)
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import setuptools

INSTALL_REQUIRES = [
    'absl-py',
    'numpy',
    'jax',
    'aenum',
]

setuptools.setup(
    name='neural-tangents',
    version='0.0.0',
    license='Apache 2.0',
    author='Google',
    author_email='neural-tangents-dev@google.com',
    install_requires=INSTALL_REQUIRES,
    url='https://github.com/google/neural-tangents',
    packages=setuptools.find_packages()
)
Add `aenum` to the OSS dependencies.
Add `aenum` to the OSS dependencies. PiperOrigin-RevId: 270454087
Python
apache-2.0
google/neural-tangents
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() ) Add `aenum` to the OSS dependencies. PiperOrigin-RevId: 270454087
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', 'aenum', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() )
<commit_before># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() ) <commit_msg>Add `aenum` to the OSS dependencies. PiperOrigin-RevId: 270454087<commit_after>
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', 'aenum', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() )
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() ) Add `aenum` to the OSS dependencies. PiperOrigin-RevId: 270454087# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', 'aenum', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() )
<commit_before># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() ) <commit_msg>Add `aenum` to the OSS dependencies. PiperOrigin-RevId: 270454087<commit_after># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import setuptools INSTALL_REQUIRES = [ 'absl-py', 'numpy', 'jax', 'aenum', ] setuptools.setup( name='neural-tangents', version='0.0.0', license='Apache 2.0', author='Google', author_email='neural-tangents-dev@google.com', install_requires=INSTALL_REQUIRES, url='https://github.com/google/neural-tangents', packages=setuptools.find_packages() )
dec2ba3a5d01516c0aa745b1d1b3cebfffeb3974
setup.py
setup.py
#!/usr/bin/env python
import os

from setuptools import setup


here = os.path.abspath(os.path.dirname(__file__))

setup(
    name='mock-services',
    version=open(os.path.join(here, 'VERSION')).read().strip(),
    description='Mock services.',
    long_description=open(os.path.join(here, 'README.rst')).read(),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    keywords=[
        'http',
        'mock',
        'requests',
        'rest',
    ],
    author='Florent Pigout',
    author_email='florent.pigout@novapost.fr',
    url='https://github.com/novafloss/mock-services',
    license='MIT',
    install_requires=[
        'attrs',
        'funcsigs',
        'requests-mock<2.0.0',
    ],
    extras_require={
        'test': [
            'flake8'
        ],
        'release': [
            'wheel',
            'zest.releaser'
        ],
    },
    packages=[
        'mock_services'
    ],
)
#!/usr/bin/env python
import os

from setuptools import setup


here = os.path.abspath(os.path.dirname(__file__))

setup(
    name='mock-services',
    version=open(os.path.join(here, 'VERSION')).read().strip(),
    description='Mock services.',
    long_description=open(os.path.join(here, 'README.rst')).read(),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    keywords=[
        'http',
        'mock',
        'requests',
        'rest',
    ],
    author='Florent Pigout',
    author_email='florent.pigout@novapost.fr',
    url='https://github.com/novafloss/mock-services',
    license='MIT',
    install_requires=[
        'attrs',
        'funcsigs',
        'requests-mock>=1.2.0',
    ],
    extras_require={
        'test': [
            'flake8'
        ],
        'release': [
            'wheel',
            'zest.releaser'
        ],
    },
    packages=[
        'mock_services'
    ],
)
Fix requests-mock version requirement (>=1.2.0)
Fix requests-mock version requirement (>=1.2.0)
Python
mit
novafloss/mock-services
#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock<2.0.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], ) Fix requests-mock version requirement (>=1.2.0)
#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock>=1.2.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], )
<commit_before>#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock<2.0.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], ) <commit_msg>Fix requests-mock version requirement (>=1.2.0)<commit_after>
#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock>=1.2.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], )
#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock<2.0.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], ) Fix requests-mock version requirement (>=1.2.0)#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock>=1.2.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], )
<commit_before>#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock<2.0.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], ) <commit_msg>Fix requests-mock version requirement (>=1.2.0)<commit_after>#!/usr/bin/env python import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) setup( name='mock-services', version=open(os.path.join(here, 'VERSION')).read().strip(), description='Mock services.', long_description=open(os.path.join(here, 'README.rst')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords=[ 'http', 'mock', 'requests', 'rest', ], author='Florent Pigout', author_email='florent.pigout@novapost.fr', url='https://github.com/novafloss/mock-services', license='MIT', install_requires=[ 'attrs', 'funcsigs', 'requests-mock>=1.2.0', ], extras_require={ 'test': [ 'flake8' ], 'release': [ 'wheel', 'zest.releaser' ], }, packages=[ 'mock_services' ], )
b7e95009ec210ff199e36a3ca065d3efea82f940
setup.py
setup.py
#! /usr/bin/env python

from setuptools import setup

version = '0.0.0'

setup(name="python-irclib2",
      version=version,
      py_modules=["irclib", "ircbot"],
      author="Joel Rosdahl",
      author_email="jsoel@rosdahl.net",
      url="http://python-irclib.sourceforge.net",
      zip_safe=False,
      )
#! /usr/bin/env python

from setuptools import setup

version = '0.4.8.1'

setup(name="python-irclib",
      version=version,
      py_modules=["irclib", "ircbot"],
      author="Joel Rosdahl",
      author_email="jsoel@rosdahl.net",
      url="http://python-irclib.sourceforge.net",
      zip_safe=False,
      )
Use a sane version number, based on the original from SF.net
Use a sane version number, based on the original from SF.net
Python
lgpl-2.1
danfairs/python-irclib
#! /usr/bin/env python from setuptools import setup version = '0.0.0' setup(name="python-irclib2", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, ) Use a sane version number, based on the original from SF.net
#! /usr/bin/env python from setuptools import setup version = '0.4.8.1' setup(name="python-irclib", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, )
<commit_before>#! /usr/bin/env python from setuptools import setup version = '0.0.0' setup(name="python-irclib2", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, ) <commit_msg>Use a sane version number, based on the original from SF.net<commit_after>
#! /usr/bin/env python from setuptools import setup version = '0.4.8.1' setup(name="python-irclib", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, )
#! /usr/bin/env python from setuptools import setup version = '0.0.0' setup(name="python-irclib2", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, ) Use a sane version number, based on the original from SF.net#! /usr/bin/env python from setuptools import setup version = '0.4.8.1' setup(name="python-irclib", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, )
<commit_before>#! /usr/bin/env python from setuptools import setup version = '0.0.0' setup(name="python-irclib2", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, ) <commit_msg>Use a sane version number, based on the original from SF.net<commit_after>#! /usr/bin/env python from setuptools import setup version = '0.4.8.1' setup(name="python-irclib", version=version, py_modules=["irclib", "ircbot"], author="Joel Rosdahl", author_email="jsoel@rosdahl.net", url="http://python-irclib.sourceforge.net", zip_safe=False, )
4dfe8effc032bccf85badacedb63178ba3806449
setup.py
setup.py
"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.15.1', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, )
"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.16.0', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, )
Bump version for stip_binary flag
Bump version for stip_binary flag
Python
mit
kevinconway/rpmvenv
"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.15.1', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, ) Bump version for stip_binary flag
"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.16.0', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, )
<commit_before>"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.15.1', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, ) <commit_msg>Bump version for stip_binary flag<commit_after>
"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.16.0', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, )
"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.15.1', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, ) Bump version for stip_binary flag"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.16.0', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, )
<commit_before>"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.15.1', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, ) <commit_msg>Bump version for stip_binary flag<commit_after>"""Setuptools configuration for rpmvenv.""" from setuptools import setup from setuptools import find_packages with open('README.rst', 'r') as readmefile: README = readmefile.read() setup( name='rpmvenv', version='0.16.0', url='https://github.com/kevinconway/rpmvenv', description='RPM packager for Python virtualenv.', author="Kevin Conway", author_email="kevinjacobconway@gmail.com", long_description=README, license='MIT', packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), install_requires=[ 'jinja2', 'venvctrl', 'argparse', 'confpy', 'ordereddict', 'semver', ], entry_points={ 'console_scripts': [ 'rpmvenv = rpmvenv.cli:main', ], 'rpmvenv.extensions': [ 'core = rpmvenv.extensions.core:Extension', 'file_permissions = rpmvenv.extensions.files.permissions:Extension', 'file_extras = rpmvenv.extensions.files.extras:Extension', 'python_venv = rpmvenv.extensions.python.venv:Extension', 'blocks = rpmvenv.extensions.blocks.generic:Extension', ] }, package_data={ "rpmvenv": ["templates/*"], }, )
7fa6690bd0d473f61b373cbb4bea3f8c5c3e60ba
setup.py
setup.py
import os

# BEFORE importing distutils, remove MANIFEST. distutils doesn't
# properly update it when the contents of directories change.
if os.path.exists('MANIFEST'):
    os.remove('MANIFEST')

from distutils.core import setup

MAJOR = 0
MINOR = 1
MICRO = 0
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)

if __name__ == '__main__':
    setup(name='wradlib',
          version=VERSION,
          description='Open Source Library for Weather Radar Data Processing',
          long_description = """\
wradlib - An Open Source Library for Weather Radar Data Processing
==================================================================
wradlib is designed to assist you in the most important steps of processing
weather radar data. These may include: reading common data formats,
georeferencing, converting reflectivity to rainfall intensity, identifying
and correcting typical error sources (such as clutter or attenuation) and
visualising the data.
""",
          license='BSD',
          url='http://wradlib.bitbucket.org/',
          download_url='https://bitbucket.org/wradlib/wradlib',
          packages=['wradlib'],
          classifiers=[
              'Development Status :: 4 - Beta',
              'License :: OSI Approved :: BSD License',
              'Environment :: Console',
              'Operating System :: OS Independent',
              'Intended Audience :: Science/Research',
              'Programming Language :: Python',
              'Topic :: Scientific/Engineering',
          ],
          )
import os

# BEFORE importing distutils, remove MANIFEST. distutils doesn't
# properly update it when the contents of directories change.
if os.path.exists('MANIFEST'):
    os.remove('MANIFEST')

from distutils.core import setup

MAJOR = 0
MINOR = 1
MICRO = 1
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)

if __name__ == '__main__':
    setup(name='wradlib',
          version=VERSION,
          description='Open Source Library for Weather Radar Data Processing',
          long_description = """\
wradlib - An Open Source Library for Weather Radar Data Processing
==================================================================
wradlib is designed to assist you in the most important steps of processing
weather radar data. These may include: reading common data formats,
georeferencing, converting reflectivity to rainfall intensity, identifying
and correcting typical error sources (such as clutter or attenuation) and
visualising the data.
""",
          license='BSD',
          url='http://wradlib.bitbucket.org/',
          download_url='https://bitbucket.org/wradlib/wradlib',
          packages=['wradlib'],
          classifiers=[
              'Development Status :: 4 - Beta',
              'License :: OSI Approved :: BSD License',
              'Environment :: Console',
              'Operating System :: OS Independent',
              'Intended Audience :: Science/Research',
              'Programming Language :: Python',
              'Topic :: Scientific/Engineering',
          ],
          )
Update version to match version reported on website
BLD: Update version to match version reported on website
Python
mit
jjhelmus/wradlib,jjhelmus/wradlib
import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 0 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], ) BLD: Update version to match version reported on website
import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 1 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], )
<commit_before>import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 0 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], ) <commit_msg>BLD: Update version to match version reported on website<commit_after>
import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 1 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], )
import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 0 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], ) BLD: Update version to match version reported on websiteimport os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 1 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], )
<commit_before>import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 0 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], ) <commit_msg>BLD: Update version to match version reported on website<commit_after>import os # BEFORE importing distutils, remove MANIFEST. distutils doesn't # properly update it when the contents of directories change. if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup MAJOR = 0 MINOR = 1 MICRO = 1 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) if __name__ == '__main__': setup(name='wradlib', version=VERSION, description='Open Source Library for Weather Radar Data Processing', long_description = """\ wradlib - An Open Source Library for Weather Radar Data Processing ================================================================== wradlib is designed to assist you in the most important steps of processing weather radar data. These may include: reading common data formats, georeferencing, converting reflectivity to rainfall intensity, identifying and correcting typical error sources (such as clutter or attenuation) and visualising the data. """, license='BSD', url='http://wradlib.bitbucket.org/', download_url='https://bitbucket.org/wradlib/wradlib', packages=['wradlib'], classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Environment :: Console', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', ], )
4026eb55cf57612cff6c62669d501c2d48af74b7
setup.py
setup.py
#from distribute_setup import use_setuptools
#use_setuptools()

from setuptools import setup, find_packages
from os.path import dirname, join

here = dirname(__file__)

setup(
    name='ledgerblue',
    version='0.1.4',
    author='Ledger',
    author_email='hello@ledger.fr',
    description='Python library to communicate with Ledger Blue/Nano S',
    long_description=open(join(here, 'README.md')).read(),
    packages=find_packages(),
    install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'],
    extras_require = {
        'smartcard': [ 'python-pyscard>=1.6.12-4build1' ]
    },
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Operating System :: POSIX :: Windows',
        'Operating System :: MacOS :: MacOS X'
    ]
)
#from distribute_setup import use_setuptools
#use_setuptools()

from setuptools import setup, find_packages
from os.path import dirname, join
import os

os.environ['SECP_BUNDLED_EXPERIMENTAL'] = "1"

here = dirname(__file__)

setup(
    name='ledgerblue',
    version='0.1.5',
    author='Ledger',
    author_email='hello@ledger.fr',
    description='Python library to communicate with Ledger Blue/Nano S',
    long_description=open(join(here, 'README.md')).read(),
    url='https://github.com/LedgerHQ/blue-loader-python',
    packages=find_packages(),
    install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'],
    extras_require = {
        'smartcard': [ 'python-pyscard>=1.6.12-4build1' ]
    },
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X'
    ]
)
Tag 0.1.5, tweaks for PyPI compatibility
Tag 0.1.5, tweaks for PyPI compatibility
Python
apache-2.0
LedgerHQ/blue-loader-python
#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join here = dirname(__file__) setup( name='ledgerblue', version='0.1.4', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: POSIX :: Windows', 'Operating System :: MacOS :: MacOS X' ] ) Tag 0.1.5, tweaks for PyPI compatibility
#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join import os os.environ['SECP_BUNDLED_EXPERIMENTAL'] = "1" here = dirname(__file__) setup( name='ledgerblue', version='0.1.5', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), url='https://github.com/LedgerHQ/blue-loader-python', packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X' ] )
<commit_before>#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join here = dirname(__file__) setup( name='ledgerblue', version='0.1.4', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: POSIX :: Windows', 'Operating System :: MacOS :: MacOS X' ] ) <commit_msg>Tag 0.1.5, tweaks for PyPI compatibility<commit_after>
#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join import os os.environ['SECP_BUNDLED_EXPERIMENTAL'] = "1" here = dirname(__file__) setup( name='ledgerblue', version='0.1.5', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), url='https://github.com/LedgerHQ/blue-loader-python', packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X' ] )
#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join here = dirname(__file__) setup( name='ledgerblue', version='0.1.4', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: POSIX :: Windows', 'Operating System :: MacOS :: MacOS X' ] ) Tag 0.1.5, tweaks for PyPI compatibility#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join import os os.environ['SECP_BUNDLED_EXPERIMENTAL'] = "1" here = dirname(__file__) setup( name='ledgerblue', version='0.1.5', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), url='https://github.com/LedgerHQ/blue-loader-python', packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X' ] )
<commit_before>#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join here = dirname(__file__) setup( name='ledgerblue', version='0.1.4', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: POSIX :: Windows', 'Operating System :: MacOS :: MacOS X' ] ) <commit_msg>Tag 0.1.5, tweaks for PyPI compatibility<commit_after>#from distribute_setup import use_setuptools #use_setuptools() from setuptools import setup, find_packages from os.path import dirname, join import os os.environ['SECP_BUNDLED_EXPERIMENTAL'] = "1" here = dirname(__file__) setup( name='ledgerblue', version='0.1.5', author='Ledger', author_email='hello@ledger.fr', description='Python library to communicate with Ledger Blue/Nano S', long_description=open(join(here, 'README.md')).read(), url='https://github.com/LedgerHQ/blue-loader-python', packages=find_packages(), install_requires=['hidapi>=0.7.99', 'secp256k1>=0.12.1', 'pycrypto>=2.6.1'], extras_require = { 'smartcard': [ 'python-pyscard>=1.6.12-4build1' ] }, include_package_data=True, zip_safe=False, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X' ] )
f1a2a8ff1f0655489eac875f4e4b2aece10843da
setup.py
setup.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    os.system('python setup.py bdist_wheel upload')
    sys.exit()

readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')

setup(
    author="Ben Lopatin",
    author_email="ben@wellfire.co",
    name='django-organizations',
    version='0.2.0',
    description='Group accounts for Django',
    long_description=readme + '\n\n' + history,
    url='https://github.com/wellfire/django-organizations/',
    license='BSD License',
    platforms=['OS Independent'],
    packages=[
        'organizations',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
    install_requires=[
        'Django>=1.4.2',
        'django-extensions>=0.9',
    ],
    test_suite='tests',
    include_package_data=True,
    zip_safe=False,
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    os.system('python setup.py bdist_wheel upload')
    sys.exit()

readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')

setup(
    author="Ben Lopatin",
    author_email="ben@wellfire.co",
    name='django-organizations',
    version='0.2.0',
    description='Group accounts for Django',
    long_description=readme + '\n\n' + history,
    url='https://github.com/wellfire/django-organizations/',
    license='BSD License',
    platforms=['OS Independent'],
    packages=[
        'organizations',
        'organizations.backends',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
    install_requires=[
        'Django>=1.4.2,<1.7',
        'django-extensions>=0.9',
    ],
    test_suite='tests',
    include_package_data=True,
    zip_safe=False,
)
Add backends as explicit package
Add backends as explicit package
Python
bsd-2-clause
bennylope/django-organizations,DESHRAJ/django-organizations,st8st8/django-organizations,DESHRAJ/django-organizations,bennylope/django-organizations,st8st8/django-organizations,GauthamGoli/django-organizations,GauthamGoli/django-organizations
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, ) Add backends as explicit package
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', 'organizations.backends', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2,<1.7', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, ) <commit_msg>Add backends as explicit package<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', 'organizations.backends', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2,<1.7', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, )
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, ) Add backends as explicit package#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', 'organizations.backends', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2,<1.7', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, ) <commit_msg>Add backends as explicit package<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version='0.2.0', description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/wellfire/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', 'organizations.backends', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2,<1.7', 'django-extensions>=0.9', ], test_suite='tests', include_package_data=True, zip_safe=False, )
2aeb0b13c5f5a367bf4c81dd3557cc07d1c6182e
setup.py
setup.py
#!/usr/bin/env python

# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages

setup(
    name = "cobe",
    version = "2.0.0",
    author = "Peter Teichman",
    author_email = "peter@teichman.org",
    url = "http://wiki.github.com/pteichman/cobe/",
    description = "A conversation simulator similar to MegaHAL",
    packages = ["cobe"],
    test_suite = "tests",
    setup_requires = [
        "nose==1.1.2",
        "coverage==3.5"
    ],
    install_requires = [
        "PyStemmer==1.2.0",
        "argparse==1.2.1",
        "python-irclib==0.4.8"
    ],
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: End Users/Desktop",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Scientific/Engineering :: Artificial Intelligence"
    ],
    entry_points = {
        "console_scripts" : [
            "cobe = cobe.control:main"
        ]
    }
)
#!/usr/bin/env python

# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages

setup(
    name = "cobe",
    version = "2.0.0",
    author = "Peter Teichman",
    author_email = "peter@teichman.org",
    url = "http://wiki.github.com/pteichman/cobe/",
    description = "A conversation simulator similar to MegaHAL",
    packages = ["cobe"],
    test_suite = "tests",
    setup_requires = [
        "nose==1.1.2",
        "coverage==3.5"
    ],
    install_requires = [
        "PyStemmer==1.2.0",
        "argparse==1.2.1",
        "python-irclib==0.4.6"
    ],
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: End Users/Desktop",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Scientific/Engineering :: Artificial Intelligence"
    ],
    entry_points = {
        "console_scripts" : [
            "cobe = cobe.control:main"
        ]
    }
)
Use irclib 0.4.6, which is available via pip
Use irclib 0.4.6, which is available via pip
Python
mit
meska/cobe,wodim/cobe-ng,DarkMio/cobe,DarkMio/cobe,tiagochiavericosta/cobe,tiagochiavericosta/cobe,pteichman/cobe,wodim/cobe-ng,pteichman/cobe,LeMagnesium/cobe,LeMagnesium/cobe,meska/cobe
#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.8" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } ) Use irclib 0.4.6, which is available via pip
#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.6" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } )
<commit_before>#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.8" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } ) <commit_msg>Use irclib 0.4.6, which is available via pip<commit_after>
#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.6" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } )
#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.8" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } ) Use irclib 0.4.6, which is available via pip#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.6" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } )
<commit_before>#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.8" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } ) <commit_msg>Use irclib 0.4.6, which is available via pip<commit_after>#!/usr/bin/env python # Require setuptools. See http://pypi.python.org/pypi/setuptools for # installation instructions, or run the ez_setup script found at # http://peak.telecommunity.com/dist/ez_setup.py from setuptools import setup, find_packages setup( name = "cobe", version = "2.0.0", author = "Peter Teichman", author_email = "peter@teichman.org", url = "http://wiki.github.com/pteichman/cobe/", description = "A conversation simulator similar to MegaHAL", packages = ["cobe"], test_suite = "tests", setup_requires = [ "nose==1.1.2", "coverage==3.5" ], install_requires = [ "PyStemmer==1.2.0", "argparse==1.2.1", "python-irclib==0.4.6" ], classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Artificial Intelligence" ], entry_points = { "console_scripts" : [ "cobe = cobe.control:main" ] } )
aab258a8f4898db3829754c1de6a81c65ea37d07
setup.py
setup.py
import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' )
import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper', 'lc_wrapper.ipython'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' )
Add sub-package for IPython Kernel
Add sub-package for IPython Kernel
Python
bsd-3-clause
NII-cloud-operation/Jupyter-LC_wrapper,NII-cloud-operation/Jupyter-LC_wrapper
import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' ) Add sub-package for IPython Kernel
import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper', 'lc_wrapper.ipython'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' )
<commit_before>import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' ) <commit_msg>Add sub-package for IPython Kernel<commit_after>
import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper', 'lc_wrapper.ipython'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' )
import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' ) Add sub-package for IPython Kernelimport os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper', 'lc_wrapper.ipython'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' )
<commit_before>import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' ) <commit_msg>Add sub-package for IPython Kernel<commit_after>import os from setuptools import setup HERE = os.path.abspath(os.path.dirname(__file__)) VERSION_NS = {} with open(os.path.join(HERE, 'lc_wrapper', '_version.py')) as f: exec(f.read(), {}, VERSION_NS) setup( name='lc_wrapper', version=VERSION_NS['__version__'], packages=['lc_wrapper', 'lc_wrapper.ipython'], install_requires=['ipykernel>=4.0.0', 'jupyter_client', 'python-dateutil'], description='Kernel Wrapper for Literate Computing', author='NII Cloud Operation Team', url='https://github.com/NII-cloud-operation/' )
e3a68ffe10a5f1948b65e6b4bb5bfa03308135a9
setup.py
setup.py
from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select>=0.7.0', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ])
from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ])
Remove version restriction for select
Remove version restriction for select
Python
mit
crosscompute/crosscompute-types
from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select>=0.7.0', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ]) Remove version restriction for select
from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ])
<commit_before>from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select>=0.7.0', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ]) <commit_msg>Remove version restriction for select<commit_after>
from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ])
from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select>=0.7.0', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ]) Remove version restriction for selectfrom os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ])
<commit_before>from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select>=0.7.0', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ]) <commit_msg>Remove version restriction for select<commit_after>from os.path import abspath, dirname, join from setuptools import setup FOLDER = dirname(abspath(__file__)) DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [ 'README.rst']) setup( name='crosscompute-types', version='0.7.0', description='Default data type plugins for CrossCompute', long_description=DESCRIPTION, classifiers=[ 'Programming Language :: Python', 'Framework :: Pyramid', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'License :: OSI Approved :: MIT License', ], author='CrossCompute Inc', author_email='support@crosscompute.com', url='https://crosscompute.com/docs', keywords='web pyramid pylons crosscompute', zip_safe=True, setup_requires=[ 'pytest-runner', ], install_requires=[ 'crosscompute>=0.7.0', 'crosscompute-integer>=0.7.0', 'crosscompute-text>=0.7.0', 'crosscompute-table>=0.7.0', 'crosscompute-select', 'crosscompute-image>=0.7.0', 'crosscompute-audio', 'crosscompute-video', 'crosscompute-geotable>=0.7.0', ])
bf24b8dab13c3779514a00d61c3ea440704b1cbf
setup.py
setup.py
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } )
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
Add optional dependency on arpy
Add optional dependency on arpy
Python
bsd-2-clause
angr/cle
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } ) Add optional dependency on arpy
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
<commit_before>try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } ) <commit_msg>Add optional dependency on arpy<commit_after>
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } ) Add optional dependency on arpytry: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
<commit_before>try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } ) <commit_msg>Add optional dependency on arpy<commit_after>try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
1343cfa4654810edcda775a889acc6395ebc5af5
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, )
#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', ], dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, )
Add missing line of code (merge/rebase effect).
Add missing line of code (merge/rebase effect).
Python
apache-2.0
VladimirTyrin/letsencrypt,wteiken/letsencrypt,rlustin/letsencrypt,rlustin/letsencrypt,ahojjati/letsencrypt,beermix/letsencrypt,bestwpw/letsencrypt,sapics/letsencrypt,sjerdo/letsencrypt,diracdeltas/lets-encrypt-preview,digideskio/lets-encrypt-preview,thanatos/lets-encrypt-preview,vcavallo/letsencrypt,solidgoldbomb/letsencrypt,DavidGarciaCat/letsencrypt,stewnorriss/letsencrypt,Hasimir/letsencrypt,tdfischer/lets-encrypt-preview,beermix/letsencrypt,xgin/letsencrypt,dietsche/letsencrypt,rugk/letsencrypt,DavidGarciaCat/letsencrypt,bsmr-misc-forks/letsencrypt,rutsky/letsencrypt,brentdax/letsencrypt,piru/letsencrypt,martindale/letsencrypt,BKreisel/letsencrypt,stewnorriss/letsencrypt,Jadaw1n/letsencrypt,skynet/letsencrypt,goofwear/letsencrypt,xgin/letsencrypt,BillKeenan/lets-encrypt-preview,Sveder/letsencrypt,riseofthetigers/letsencrypt,stweil/letsencrypt,bestwpw/letsencrypt,lbeltrame/letsencrypt,jsha/letsencrypt,VladimirTyrin/letsencrypt,modulexcite/letsencrypt,goofwear/letsencrypt,wteiken/letsencrypt,hsduk/lets-encrypt-preview,ahojjati/letsencrypt,solidgoldbomb/letsencrypt,skynet/letsencrypt,lbeltrame/letsencrypt,kevinlondon/letsencrypt,jmaurice/letsencrypt,martindale/letsencrypt,mitnk/letsencrypt,luorenjin/letsencrypt,letsencrypt/letsencrypt,Jonadabe/letsencrypt,BillKeenan/lets-encrypt-preview,tyagi-prashant/letsencrypt,twstrike/le_for_patching,ghyde/letsencrypt,jmhodges/letsencrypt,dietsche/letsencrypt,fmarier/letsencrypt,TheBoegl/letsencrypt,bsmr-misc-forks/letsencrypt,mrb/letsencrypt,hlieberman/letsencrypt,lmcro/letsencrypt,twstrike/le_for_patching,jmhodges/letsencrypt,PeterMosmans/letsencrypt,stweil/letsencrypt,jtl999/certbot,jmaurice/letsencrypt,kuba/letsencrypt,vcavallo/letsencrypt,letsencrypt/letsencrypt,PeterMosmans/letsencrypt,hsduk/lets-encrypt-preview,g1franc/lets-encrypt-preview,mitnk/letsencrypt,tdfischer/lets-encrypt-preview,sapics/letsencrypt,Hasimir/letsencrypt,piru/letsencrypt,jtl999/certbot,g1franc/lets-encrypt-preview,kevinlondon/letsencrypt,jsha/letsencrypt,deserted/letsencrypt,ruo91/letsencrypt,Jonadabe/letsencrypt,kuba/letsencrypt,armersong/letsencrypt,rutsky/letsencrypt,Bachmann1234/letsencrypt,deserted/letsencrypt,ruo91/letsencrypt,mrb/letsencrypt,BKreisel/letsencrypt,Sveder/letsencrypt,diracdeltas/lets-encrypt-preview,brentdax/letsencrypt,digideskio/lets-encrypt-preview,armersong/letsencrypt,Jadaw1n/letsencrypt,sjerdo/letsencrypt,riseofthetigers/letsencrypt,lmcro/letsencrypt,Bachmann1234/letsencrypt,tyagi-prashant/letsencrypt,fmarier/letsencrypt,TheBoegl/letsencrypt,rugk/letsencrypt,luorenjin/letsencrypt,thanatos/lets-encrypt-preview,modulexcite/letsencrypt,ghyde/letsencrypt,hlieberman/letsencrypt
#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, ) Add missing line of code (merge/rebase effect).
#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', ], dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, )
<commit_before>#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, ) <commit_msg>Add missing line of code (merge/rebase effect).<commit_after>
#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', ], dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, )
#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, ) Add missing line of code (merge/rebase effect).#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', ], dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, )
<commit_before>#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, ) <commit_msg>Add missing line of code (merge/rebase effect).<commit_after>#!/usr/bin/env python from setuptools import setup setup( name="letsencrypt", version="0.1", description="Let's Encrypt", author="Let's Encrypt Project", license="", url="https://letsencrypt.org", packages=[ 'letsencrypt', 'letsencrypt.client', 'letsencrypt.scripts', ], install_requires=[ 'jsonschema', 'M2Crypto', 'pycrypto', 'python-augeas', 'python2-pythondialog', 'requests', ], dependency_links=[ # http://augeas.net/download.html 'https://fedorahosted.org/released/python-augeas/', ], entry_points={ 'console_scripts': [ 'letsencrypt = letsencrypt.scripts.main:main', ], }, zip_safe=False, include_package_data=True, )
3feebdff4cb2ae82bd12b0932eda0e37391558ec
setup.py
setup.py
# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.0" setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], )
# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.1" # read the contents of your README file from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], long_description=long_description, long_description_content_type="text/markdown", )
Include README in the module description
Include README in the module description
Python
mit
Zebradil/powerline-taskwarrior,Zebradil/powerline-taskwarrior
# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.0" setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], ) Include README in the module description
# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.1" # read the contents of your README file from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], long_description=long_description, long_description_content_type="text/markdown", )
<commit_before># vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.0" setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], ) <commit_msg>Include README in the module description<commit_after>
# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.1" # read the contents of your README file from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], long_description=long_description, long_description_content_type="text/markdown", )
# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.0" setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], ) Include README in the module description# vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.1" # read the contents of your README file from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], long_description=long_description, long_description_content_type="text/markdown", )
<commit_before># vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.0" setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], ) <commit_msg>Include README in the module description<commit_after># vim:fileencoding=utf-8:noet from setuptools import setup version = "0.7.1" # read the contents of your README file from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( name="powerline-taskwarrior", description="Powerline segments for showing information from the Taskwarrior task manager", version=version, keywords="powerline taskwarrior context prompt", license="MIT", author="German Lashevich", author_email="german.lashevich@gmail.com", url="https://github.com/zebradil/powerline-taskwarrior", download_url="https://github.com/zebradil/powerline-taskwarrior/tarball/{version}".format(version=version), packages=["powerline_taskwarrior"], install_requires=["powerline-status"], classifiers=[ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Terminals", ], long_description=long_description, long_description_content_type="text/markdown", )
132e877a851bfdc8975e2fdc7f2a594fd4fc3d1b
setup.py
setup.py
from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="founders@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], )
from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="development@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], )
Change author_email to another address
Change author_email to another address
Python
bsd-3-clause
Intelworks/cabby
from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="founders@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], ) Change author_email to another address
from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="development@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], )
<commit_before>from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="founders@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], ) <commit_msg>Change author_email to another address<commit_after>
from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="development@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], )
from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="founders@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], ) Change author_email to another addressfrom setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="development@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], )
<commit_before>from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="founders@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], ) <commit_msg>Change author_email to another address<commit_after>from setuptools import setup, find_packages setup( name="taxii-client", version="0.0.2", url="https://github.com/Intelworks/taxii-client/", author="Intelworks", author_email="development@intelworks.com", packages=find_packages(), scripts=[ 'bin/taxii-collections', 'bin/taxii-discovery', 'bin/taxii-poll', 'bin/taxii-push', ], install_requires=[ 'libtaxii==1.1.105-SNAPSHOT', 'pytz', 'colorlog', ], )
db7e2f16f1d394e430e60d2eaa40c5e0d8b22a46
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="django-mailer", version=__import__("mailer").__version__, description="A reusable Django app for queuing the sending of email", long_description=open("docs/usage.rst").read() + open("CHANGES.rst").read(), author="Pinax Team", author_email="developers@pinaxproject.com", url="http://github.com/pinax/django-mailer/", packages=find_packages(), package_dir={"mailer": "mailer"}, package_data={'mailer': ['locale/*/LC_MESSAGES/*.*']}, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Framework :: Django", ], install_requires=[ # 'Django', 'lockfile >= 0.8', ], )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="django-mailer", version=__import__("mailer").__version__, description="A reusable Django app for queuing the sending of email", long_description=open("docs/usage.rst").read() + open("CHANGES.rst").read(), author="Pinax Team", author_email="developers@pinaxproject.com", url="http://github.com/pinax/django-mailer/", packages=find_packages(), package_dir={"mailer": "mailer"}, package_data={'mailer': ['locale/*/LC_MESSAGES/*.*']}, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Framework :: Django", ], install_requires=[ 'Django >= 1.4', 'lockfile >= 0.8', ], )
Revert "Removed Django requirement due versioning"
Revert "Removed Django requirement due versioning" This reverts commit 78bae13778ec70cd3654e74a684839914f44364e.
Python
mit
temnoregg/django-mailer
88d15544556cdfc9fe1f2e000f67846a8cd1bb25
stginga/__init__.py
stginga/__init__.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Ginga products specific to STScI data analysis.
"""
# Set up the version
from pkg_resources import get_distribution, DistributionNotFound

try:
    __version__ = get_distribution(__name__).version
except DistributionNotFound:
    # package is not installed
    __version__ = 'unknown'

# UI
from .plugin_info import *  # noqa
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Ginga products specific to STScI data analysis.
"""
# Packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *  # noqa
# ----------------------------------------------------------------------------

# UI
from .plugin_info import *  # noqa
Remove duplicate version and restore test runner
BUG: Remove duplicate version and restore test runner
Python
bsd-3-clause
pllim/stginga,spacetelescope/stginga
721548eef3abaecb187b2246b58f90d74e0026ab
currencies/models.py
currencies/models.py
from django.db import models
from django.utils.translation import gettext_lazy as _


class Currency(models.Model):
    code = models.CharField(_('code'), max_length=3)
    name = models.CharField(_('name'), max_length=35)
    symbol = models.CharField(_('symbol'), max_length=1)
    factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
        help_text=_('Specifies the difference of the currency to default one.'))
    is_active = models.BooleanField(_('active'), default=True,
        help_text=_('The currency will be available.'))
    is_default = models.BooleanField(_('default'), default=False,
        help_text=_('Make this the default currency.'))

    class Meta:
        verbose_name = _('currency')
        verbose_name_plural = _('currencies')

    def __unicode__(self):
        return self.code

    def save(self, **kwargs):
        if self.is_default:
            try:
                default_currency = Currency.objects.get(is_default=True)
            except DoesNotExist:
                pass
            else:
                default_currency.is_default = False
                default_currency.save()
        super(Currency, self).save(**kwargs)
from django.db import models
from django.utils.translation import gettext_lazy as _


class Currency(models.Model):
    code = models.CharField(_('code'), max_length=3)
    name = models.CharField(_('name'), max_length=35)
    symbol = models.CharField(_('symbol'), max_length=1)
    factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
        help_text=_('Specifies the difference of the currency to default one.'))
    is_active = models.BooleanField(_('active'), default=True,
        help_text=_('The currency will be available.'))
    is_default = models.BooleanField(_('default'), default=False,
        help_text=_('Make this the default currency.'))

    class Meta:
        verbose_name = _('currency')
        verbose_name_plural = _('currencies')

    def __unicode__(self):
        return self.code

    def save(self, **kwargs):
        # Make sure the default currency is unique
        if self.is_default:
            Currency.objects.filter(is_default=True).update(is_default=False)
        super(Currency, self).save(**kwargs)
Improve the uniqueness of Currency.is_default
Improve the uniqueness of Currency.is_default
Python
bsd-3-clause
pathakamit88/django-currencies,barseghyanartur/django-currencies,racitup/django-currencies,mysociety/django-currencies,ydaniv/django-currencies,mysociety/django-currencies,jmp0xf/django-currencies,marcosalcazar/django-currencies,bashu/django-simple-currencies,ydaniv/django-currencies,racitup/django-currencies,pathakamit88/django-currencies,panosl/django-currencies,bashu/django-simple-currencies,panosl/django-currencies,marcosalcazar/django-currencies
2a8a13986b29bdc405fc922143e3407c81f196c0
timpani/settings.py
timpani/settings.py
from . import database


def getAllSettings():
    databaseConnection = database.ConnectionManager.getConnection("main")
    query = databaseConnection.session.query(database.tables.Setting)
    settings = query.all()
    return {setting.name: setting.value for setting in settings}


def getSettingValue(name):
    databaseConnection = database.ConnectionManager.getConnection("main")
    query = (databaseConnection.session
        .query(database.tables.Setting)
        .filter(database.tables.Setting.name == name))
    if query.count() > 0:
        return query.first().value
    return None


def setSettingValue(name, value):
    databaseConnection = database.ConnectionManager.getConnection("main")
    settingObj = database.tables.Setting(name = name, value = value)
    databaseConnection.session.merge(settingObj)
    databaseConnection.session.commit()
from . import database


def getAllSettings():
    databaseConnection = database.ConnectionManager.getConnection("main")
    query = databaseConnection.session.query(database.tables.Setting)
    settings = query.all()
    return {setting.name: setting.value for setting in settings}


def getSettingValue(name):
    databaseConnection = database.ConnectionManager.getConnection("main")
    query = (databaseConnection.session
        .query(database.tables.Setting)
        .filter(database.tables.Setting.name == name))
    if query.count() > 0:
        return query.first().value
    return None


def setSettingValue(name, value):
    databaseConnection = database.ConnectionManager.getConnection("main")
    settingObj = database.tables.Setting(name = name, value = value)
    databaseConnection.session.merge(settingObj)
    databaseConnection.session.commit()


def validateSetting(name, value):
    if name == "title":
        return len(value) > 0
Add setting validation function for title
Add setting validation function for title
Python
mit
ollien/Timpani,ollien/Timpani,ollien/Timpani
8c587107b73685a99df3358f8d219ed5c76e0a48
csunplugged/utils/check_glossary_links.py
csunplugged/utils/check_glossary_links.py
"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): if not GlossaryTerm.objects.filter(slug=term).exists(): raise CouldNotFindGlossaryTerm(term, md_file_path)
"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): try: GlossaryTerm.objects.get(slug=term) except ObjectDoesNotExist: raise CouldNotFindGlossaryTerm(term, md_file_path)
Use clearer Django method and exception for glossary term checking
Use clearer Django method and exception for glossary term checking
Python
mit
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): if not GlossaryTerm.objects.filter(slug=term).exists(): raise CouldNotFindGlossaryTerm(term, md_file_path) Use clearer Django method and exception for glossary term checking
"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): try: GlossaryTerm.objects.get(slug=term) except ObjectDoesNotExist: raise CouldNotFindGlossaryTerm(term, md_file_path)
<commit_before>"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): if not GlossaryTerm.objects.filter(slug=term).exists(): raise CouldNotFindGlossaryTerm(term, md_file_path) <commit_msg>Use clearer Django method and exception for glossary term checking<commit_after>
"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): try: GlossaryTerm.objects.get(slug=term) except ObjectDoesNotExist: raise CouldNotFindGlossaryTerm(term, md_file_path)
"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): if not GlossaryTerm.objects.filter(slug=term).exists(): raise CouldNotFindGlossaryTerm(term, md_file_path) Use clearer Django method and exception for glossary term checking"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): try: GlossaryTerm.objects.get(slug=term) except ObjectDoesNotExist: raise CouldNotFindGlossaryTerm(term, md_file_path)
<commit_before>"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): if not GlossaryTerm.objects.filter(slug=term).exists(): raise CouldNotFindGlossaryTerm(term, md_file_path) <commit_msg>Use clearer Django method and exception for glossary term checking<commit_after>"""Module for checking glossary links found within Markdown conversions.""" from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm from topics.models import GlossaryTerm def check_converter_glossary_links(glossary_links, md_file_path): """Process glossary links found by Markdown converter. Args: glossary_links: Dictionary of glossary links (dict). """ for term in glossary_links.keys(): try: GlossaryTerm.objects.get(slug=term) except ObjectDoesNotExist: raise CouldNotFindGlossaryTerm(term, md_file_path)
720da7073b059e68f2c1e9dff9e2d805c27e1296
django_slack/templatetags/django_slack.py
django_slack/templatetags/django_slack.py
from django import template
from django.utils import six
from django.utils.encoding import force_text
from django.utils.safestring import SafeText, mark_safe
from django.template.defaultfilters import stringfilter

try:
    from django.utils.functional import keep_lazy as allow_lazy
except ImportError:
    from django.utils.functional import allow_lazy

register = template.Library()

_slack_escapes = {
    ord('&'): u'&amp;',
    ord('<'): u'&lt;',
    ord('>'): u'&gt;',
}


@register.filter(is_safe=True)
@stringfilter
def escapeslack(value):
    """
    Returns the given text with ampersands and angle brackets encoded for use
    in the Slack API, per the Slack API documentation:
    <https://api.slack.com/docs/formatting#how_to_escape_characters>

    This is based on django.template.defaultfilters.escapejs.
    """
    return mark_safe(force_text(value).translate(_slack_escapes))
escapeslack = allow_lazy(escapeslack, six.text_type, SafeText)
from django import template
from django.utils import six
from django.utils.encoding import force_text
from django.utils.safestring import SafeText, mark_safe
from django.template.defaultfilters import stringfilter

try:
    from django.utils.functional import keep_lazy as allow_lazy
except ImportError:
    from django.utils.functional import allow_lazy

register = template.Library()

_slack_escapes = {
    ord('&'): u'&amp;',
    ord('<'): u'&lt;',
    ord('>'): u'&gt;',
}


@register.filter(is_safe=True)
@stringfilter
def escapeslack(value):
    """
    Returns the given text with ampersands and angle brackets encoded for use
    in the Slack API, per the Slack API documentation:
    <https://api.slack.com/docs/formatting#how_to_escape_characters>

    This is based on django.template.defaultfilters.escapejs.
    """
    return mark_safe(force_text(value).translate(_slack_escapes))


escapeslack = allow_lazy(escapeslack, six.text_type, SafeText)
Add missing 2 blank lines after end of function or class. (PEP8 E305)
Add missing 2 blank lines after end of function or class. (PEP8 E305)
Python
bsd-3-clause
lamby/django-slack
c3617c5662bb360f03db62df1e8f580502796562
spurl/tests.py
spurl/tests.py
# bootstrap django
from django.conf import settings
settings.configure()#INSTALLED_APPS=('spurl',))

from django.template import Template, Context, loader
loader.add_to_builtins('spurl.templatetags.spurl')


def render(template_string, dictionary=None):
    return Template(template_string).render(Context(dictionary))


def test_passthrough():
    template = """{% spurl "http://www.google.com" %}"""
    assert render(template) == 'http://www.google.com'
# bootstrap django
from django.conf import settings
settings.configure()#INSTALLED_APPS=('spurl',))

from django.template import Template, Context, loader
loader.add_to_builtins('spurl.templatetags.spurl')


def render(template_string, dictionary=None):
    return Template(template_string).render(Context(dictionary))


def test_passthrough():
    template = """{% spurl "http://www.google.com" %}"""
    assert render(template) == 'http://www.google.com'


def test_url_in_variable():
    template = """{% spurl myurl %}"""
    data = {'myurl': 'http://www.google.com'}
    assert render(template, data) == 'http://www.google.com'
Add test for url in template context
Add test for url in template context
Python
unlicense
albertkoch/django-spurl,pombredanne/django-spurl,j4mie/django-spurl
fe551b6f4976a1642aa006d5afbcbee2533f08c8
menpofit/dlib/__init__.py
menpofit/dlib/__init__.py
try:
    from .fitter import DlibERT
except ImportError:
    # If dlib is not installed then we shouldn’t import anything into this
    # module.
    pass
try:
    from .fitter import DlibERT
except ImportError:
    # If dlib is not installed then we shouldn't import anything into this
    # module.
    pass
Remove non-ASCII character from comment
Remove non-ASCII character from comment
Python
bsd-3-clause
grigorisg9gr/menpofit,yuxiang-zhou/menpofit,grigorisg9gr/menpofit,yuxiang-zhou/menpofit
f66c2907652ea5e52eb2c1355d801f5cf6c62a16
remedy/config.py
remedy/config.py
""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): DEBUG = False SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME'))
""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME'))
DEBUG true on deployment to see what's happening
DEBUG true on deployment to see what's happening
Python
mpl-2.0
radioprotector/radremedy,radremedy/radremedy,radioprotector/radremedy,radremedy/radremedy,AllieDeford/radremedy,AllieDeford/radremedy,AllieDeford/radremedy,radremedy/radremedy,radioprotector/radremedy,radioprotector/radremedy,radremedy/radremedy
""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): DEBUG = False SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME')) DEBUG true on deployment to see what's happening
""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME'))
<commit_before>""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): DEBUG = False SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME')) <commit_msg>DEBUG true on deployment to see what's happening<commit_after>
""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME'))
""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): DEBUG = False SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME')) DEBUG true on deployment to see what's happening""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME'))
<commit_before>""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): DEBUG = False SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME')) <commit_msg>DEBUG true on deployment to see what's happening<commit_after>""" config.py Looks for the RAD_PRODUCTION variable and creates path to database """ import os _basedir = os.path.abspath(os.path.dirname(__file__)) class BaseConfig(object): DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'rad/rad.db') MIGRATIONS_DIR = './remedy/rad/migrations' SECRET_KEY = 'Our little secret' class DevelopmentConfig(BaseConfig): pass class ProductionConfig(BaseConfig): SQLALCHEMY_DATABASE_URI = 'mysql://{0}:{1}@{2}/{3}'.format(os.environ.get('RDS_USERNAME'), os.environ.get('RDS_PASSWORD'), os.environ.get('RDS_HOSTNAME'), os.environ.get('RDS_DB_NAME'))
9ea8b1f1f4ccc068b460e76127f288742d25088e
django/contrib/comments/feeds.py
django/contrib/comments/feeds.py
from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
Use correct m2m join table name in LatestCommentsFeed git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Python
bsd-3-clause
sam-tsai/django-old,skevy/django,dcramer/django-compositepks,alex/django-old,Instagram/django,dcramer/django-compositepks,django-nonrel/django-nonrel,alex/django-old,Smarsh/django,Smarsh/django,dcramer/django-compositepks,bfirsh/django-old,bfirsh/django-old,disqus/django-old,t11e/django,mitsuhiko/django,Instagram/django,alex/django-old,Instagram/django,django-nonrel/django-nonrel,sam-tsai/django-old,Smarsh/django,t11e/django,t11e/django,bfirsh/django-old,jamespacileo/django-france,sam-tsai/django-old,mitsuhiko/django,disqus/django-old,skevy/django,jamespacileo/django-france,sam-tsai/django-old,django-nonrel/django-nonrel,Smarsh/django,mitsuhiko/django,skevy/django,jamespacileo/django-france,disqus/django-old
from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date Use correct m2m join table name in LatestCommentsFeed git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
<commit_before>from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date <commit_msg>Use correct m2m join table name in LatestCommentsFeed git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>
from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date Use correct m2m join table name in LatestCommentsFeed git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
<commit_before>from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date <commit_msg>Use correct m2m join table name in LatestCommentsFeed git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>from django.conf import settings from django.contrib.syndication.feeds import Feed from django.contrib.sites.models import Site from django.contrib import comments class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"%s comments" % self._site.name def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return u"Latest comments on %s" % self._site.name def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
839d884d3dca3e799a235b1d2d69acf998f520f9
barsystem_base/management/commands/import_people.py
barsystem_base/management/commands/import_people.py
from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() print('Done')
from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person, Token class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() t = Token() t.type = 'ibutton' t.value = values['token'] t.person = p t.save() print('Done')
Add ibutton when importing old people
Add ibutton when importing old people
Python
mit
TkkrLab/barsystem,TkkrLab/barsystem,TkkrLab/barsystem
from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() print('Done')Add ibutton when importing old people
from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person, Token class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() t = Token() t.type = 'ibutton' t.value = values['token'] t.person = p t.save() print('Done')
<commit_before>from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() print('Done')<commit_msg>Add ibutton when importing old people<commit_after>
from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person, Token class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() t = Token() t.type = 'ibutton' t.value = values['token'] t.person = p t.save() print('Done')
from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() print('Done')Add ibutton when importing old peoplefrom django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person, Token class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() t = Token() t.type = 'ibutton' t.value = values['token'] t.person = p t.save() print('Done')
<commit_before>from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() print('Done')<commit_msg>Add ibutton when importing old people<commit_after>from django.core.management.base import BaseCommand, CommandError from barsystem_base.models import Person, Token class Command(BaseCommand): args = '<filename>' help = 'Import list of people' csv_columns = 'id,first_name,last_name,nick_name,amount,type,token'.split(',') def handle(self, *args, **kwargs): if len(args) == 0: raise CommandError('Please supply filename') with open(args[0], 'r') as f: columns = None for line in [line.strip().split(',') for line in f.readlines() if line[0] != '#']: # print(line) # take header if columns is None: columns = line continue values = dict(zip(columns, line)) values['active'] = values['type'] != 'hidden' try: p = Person.objects.get(id=values['id']) except Person.DoesNotExist: p = Person() for key, val in values.items(): if hasattr(p, key): setattr(p, key, val) print(p) p.save() t = Token() t.type = 'ibutton' t.value = values['token'] t.person = p t.save() print('Done')
17cbd84b9b5a4bd08123ff5f429be191b1bdf063
polynomial.py
polynomial.py
class Polynomial(object): def __init__(self): pass
class Polynomial(object): def __init__(self, coeffs): """ 1 parameter: coeff (list): coeff[n] = coefficient of nth degree term """ self.coeffs = coeffs @property def coeffs(self): return self._coeffs @property def degree(self): return len(self.coeffs) - 1 @coeffs.setter def coeffs(self, c): if not isinstance(c, list): raise TypeError("must provide list as arg") elif len(c) == 0: raise ValueError("arg length must be > 0") else: self._coeffs = c def main(): p1 = Polynomial([1]) # p = 1 p2 = Polynomial([2, 3, 4]) # p = 2 + 3x + 4x^2 assert(p1.degree == 0) assert(p2.degree == 2) assert(p2.coeffs == [2, 3, 4]) if __name__ == "__main__": main()
Add __init__, coeffs and degree attributes
Add __init__, coeffs and degree attributes
Python
mit
jackromo/mathLibPy
class Polynomial(object): def __init__(self): passAdd __init__, coeffs and degree attributes
class Polynomial(object): def __init__(self, coeffs): """ 1 parameter: coeff (list): coeff[n] = coefficient of nth degree term """ self.coeffs = coeffs @property def coeffs(self): return self._coeffs @property def degree(self): return len(self.coeffs) - 1 @coeffs.setter def coeffs(self, c): if not isinstance(c, list): raise TypeError("must provide list as arg") elif len(c) == 0: raise ValueError("arg length must be > 0") else: self._coeffs = c def main(): p1 = Polynomial([1]) # p = 1 p2 = Polynomial([2, 3, 4]) # p = 2 + 3x + 4x^2 assert(p1.degree == 0) assert(p2.degree == 2) assert(p2.coeffs == [2, 3, 4]) if __name__ == "__main__": main()
<commit_before> class Polynomial(object): def __init__(self): pass<commit_msg>Add __init__, coeffs and degree attributes<commit_after>
class Polynomial(object): def __init__(self, coeffs): """ 1 parameter: coeff (list): coeff[n] = coefficient of nth degree term """ self.coeffs = coeffs @property def coeffs(self): return self._coeffs @property def degree(self): return len(self.coeffs) - 1 @coeffs.setter def coeffs(self, c): if not isinstance(c, list): raise TypeError("must provide list as arg") elif len(c) == 0: raise ValueError("arg length must be > 0") else: self._coeffs = c def main(): p1 = Polynomial([1]) # p = 1 p2 = Polynomial([2, 3, 4]) # p = 2 + 3x + 4x^2 assert(p1.degree == 0) assert(p2.degree == 2) assert(p2.coeffs == [2, 3, 4]) if __name__ == "__main__": main()
class Polynomial(object): def __init__(self): passAdd __init__, coeffs and degree attributes class Polynomial(object): def __init__(self, coeffs): """ 1 parameter: coeff (list): coeff[n] = coefficient of nth degree term """ self.coeffs = coeffs @property def coeffs(self): return self._coeffs @property def degree(self): return len(self.coeffs) - 1 @coeffs.setter def coeffs(self, c): if not isinstance(c, list): raise TypeError("must provide list as arg") elif len(c) == 0: raise ValueError("arg length must be > 0") else: self._coeffs = c def main(): p1 = Polynomial([1]) # p = 1 p2 = Polynomial([2, 3, 4]) # p = 2 + 3x + 4x^2 assert(p1.degree == 0) assert(p2.degree == 2) assert(p2.coeffs == [2, 3, 4]) if __name__ == "__main__": main()
<commit_before> class Polynomial(object): def __init__(self): pass<commit_msg>Add __init__, coeffs and degree attributes<commit_after> class Polynomial(object): def __init__(self, coeffs): """ 1 parameter: coeff (list): coeff[n] = coefficient of nth degree term """ self.coeffs = coeffs @property def coeffs(self): return self._coeffs @property def degree(self): return len(self.coeffs) - 1 @coeffs.setter def coeffs(self, c): if not isinstance(c, list): raise TypeError("must provide list as arg") elif len(c) == 0: raise ValueError("arg length must be > 0") else: self._coeffs = c def main(): p1 = Polynomial([1]) # p = 1 p2 = Polynomial([2, 3, 4]) # p = 2 + 3x + 4x^2 assert(p1.degree == 0) assert(p2.degree == 2) assert(p2.coeffs == [2, 3, 4]) if __name__ == "__main__": main()
f41f9f9e562c6850d70ee17976c9dbb4aa3cca5f
pseudodata.py
pseudodata.py
class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names def get_labels(self): labels = [key['label'] for key in self.name_func_dict.keys()] return labels # class PseudoFunction(object): # def __init__(self, name, label, active): # self.name = name # self.label = label # self.active = active # def __call__(self): # return 10
class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names
Remove unused get_labels function and commented code
Remove unused get_labels function and commented code
Python
mit
mchels/FolderBrowser
class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names def get_labels(self): labels = [key['label'] for key in self.name_func_dict.keys()] return labels # class PseudoFunction(object): # def __init__(self, name, label, active): # self.name = name # self.label = label # self.active = active # def __call__(self): # return 10 Remove unused get_labels function and commented code
class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names
<commit_before>class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names def get_labels(self): labels = [key['label'] for key in self.name_func_dict.keys()] return labels # class PseudoFunction(object): # def __init__(self, name, label, active): # self.name = name # self.label = label # self.active = active # def __call__(self): # return 10 <commit_msg>Remove unused get_labels function and commented code<commit_after>
class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names
class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names def get_labels(self): labels = [key['label'] for key in self.name_func_dict.keys()] return labels # class PseudoFunction(object): # def __init__(self, name, label, active): # self.name = name # self.label = label # self.active = active # def __call__(self): # return 10 Remove unused get_labels function and commented codeclass PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names
<commit_before>class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names def get_labels(self): labels = [key['label'] for key in self.name_func_dict.keys()] return labels # class PseudoFunction(object): # def __init__(self, name, label, active): # self.name = name # self.label = label # self.active = active # def __call__(self): # return 10 <commit_msg>Remove unused get_labels function and commented code<commit_after>class PseudoData(dict): def __init__(self, name_func_dict, sweep): super(PseudoData, self).__init__() self.name_func_dict = name_func_dict self.sweep = sweep def __getitem__(self, key): if key in self.keys(): return dict.__getitem__(self, key) elif key in self.name_func_dict: func = self.name_func_dict[key]['func'] pcol = func(self.sweep.data, self.sweep.pdata, self.sweep.meta) self.__setitem__(key, pcol) return pcol else: return dict.__getitem__(self, key) def get_names(self): names = [k for k, v in self.name_func_dict.items() if 'func' in v] names.sort() return names
657e98bfa2f2b55da4449c8271a108bf4f193e05
examples/recognize_faces_in_pictures.py
examples/recognize_faces_in_pictures.py
import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results))
import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. try: biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] except IndexError: print("I wasn't able to locate any faces in at least one of the images. Check the image files. Aborting...") quit() known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results))
Handle bad image files in example
Handle bad image files in example
Python
mit
ageitgey/face_recognition
import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results)) Handle bad image files in example
import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. try: biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] except IndexError: print("I wasn't able to locate any faces in at least one of the images. Check the image files. Aborting...") quit() known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results))
<commit_before>import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results)) <commit_msg>Handle bad image files in example<commit_after>
import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. try: biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] except IndexError: print("I wasn't able to locate any faces in at least one of the images. Check the image files. Aborting...") quit() known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results))
import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results)) Handle bad image files in exampleimport face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. try: biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] except IndexError: print("I wasn't able to locate any faces in at least one of the images. Check the image files. Aborting...") quit() known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results))
<commit_before>import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results)) <commit_msg>Handle bad image files in example<commit_after>import face_recognition # Load the jpg files into numpy arrays biden_image = face_recognition.load_image_file("biden.jpg") obama_image = face_recognition.load_image_file("obama.jpg") unknown_image = face_recognition.load_image_file("obama2.jpg") # Get the face encodings for each face in each image file # Since there could be more than one face in each image, it returns a list of encodings. # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0. try: biden_face_encoding = face_recognition.face_encodings(biden_image)[0] obama_face_encoding = face_recognition.face_encodings(obama_image)[0] unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0] except IndexError: print("I wasn't able to locate any faces in at least one of the images. Check the image files. Aborting...") quit() known_faces = [ biden_face_encoding, obama_face_encoding ] # results is an array of True/False telling if the unknown face matched anyone in the known_faces array results = face_recognition.compare_faces(known_faces, unknown_face_encoding) print("Is the unknown face a picture of Biden? {}".format(results[0])) print("Is the unknown face a picture of Obama? {}".format(results[1])) print("Is the unknown face a new person that we've never seen before? {}".format(not True in results))
6ddc8797035eba9f8c118e075e9b6d45081b3a9e
Functions/Connect.py
Functions/Connect.py
from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)==2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels)
from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)>=2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels)
Allow multiple channels in connect message
Allow multiple channels in connect message Still unicode-error, though? WHAT
Python
mit
HubbeKing/Hubbot_Twisted
from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)==2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels) Allow multiple channels in connect message Still unicode-error, though? WHAT
from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)>=2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels)
<commit_before>from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)==2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels) <commit_msg>Allow multiple channels in connect message Still unicode-error, though? WHAT<commit_after>
from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)>=2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels)
from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)==2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels) Allow multiple channels in connect message Still unicode-error, though? WHATfrom IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)>=2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels)
<commit_before>from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)==2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels) <commit_msg>Allow multiple channels in connect message Still unicode-error, though? WHAT<commit_after>from IRCMessage import IRCMessage from IRCResponse import IRCResponse, ResponseType from Function import Function import GlobalVars class Instantiate(Function): Help = "connect <server:port> <channel> - connect to a new server" def GetResponse(self, Hubbot, message): if message.Type != "PRIVMSG": return if message.Command == "connect" and message.User.Name in GlobalVars.admins: if len(message.ParameterList)>=2: server_with_port = message.ParameterList[0] server = server_with_port.split(":")[0] port = int(server_with_port.split(":")[1]) channels = message.ParameterList[1:] GlobalVars.bothandler.startBotFactory(server, port, channels)
60be218bb33d1d965a363b0e187ddcc88191b2d7
Lib/cluster/__init__.py
Lib/cluster/__init__.py
# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq
# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq from numpy.testing import NumpyTest test = NumpyTest().test
Add missing test definition in scipy.cluster
Add missing test definition in scipy.cluster
Python
bsd-3-clause
ortylp/scipy,jjhelmus/scipy,arokem/scipy,jsilter/scipy,lukauskas/scipy,jsilter/scipy,jseabold/scipy,pnedunuri/scipy,zerothi/scipy,jamestwebber/scipy,pschella/scipy,vhaasteren/scipy,fredrikw/scipy,vanpact/scipy,minhlongdo/scipy,FRidh/scipy,aman-iitj/scipy,argriffing/scipy,vberaudi/scipy,FRidh/scipy,behzadnouri/scipy,WillieMaddox/scipy,e-q/scipy,pyramania/scipy,piyush0609/scipy,mingwpy/scipy,chatcannon/scipy,gfyoung/scipy,witcxc/scipy,mtrbean/scipy,ilayn/scipy,behzadnouri/scipy,jakevdp/scipy,argriffing/scipy,pbrod/scipy,woodscn/scipy,Srisai85/scipy,surhudm/scipy,vberaudi/scipy,chatcannon/scipy,andim/scipy,person142/scipy,woodscn/scipy,dch312/scipy,haudren/scipy,jakevdp/scipy,grlee77/scipy,aarchiba/scipy,mikebenfield/scipy,vigna/scipy,nonhermitian/scipy,nmayorov/scipy,behzadnouri/scipy,perimosocordiae/scipy,efiring/scipy,andyfaff/scipy,behzadnouri/scipy,jor-/scipy,richardotis/scipy,anntzer/scipy,zaxliu/scipy,raoulbq/scipy,tylerjereddy/scipy,mhogg/scipy,zxsted/scipy,minhlongdo/scipy,ChanderG/scipy,pnedunuri/scipy,mdhaber/scipy,surhudm/scipy,ales-erjavec/scipy,endolith/scipy,andyfaff/scipy,njwilson23/scipy,matthew-brett/scipy,mortada/scipy,mortada/scipy,apbard/scipy,pbrod/scipy,pizzathief/scipy,gef756/scipy,zerothi/scipy,zerothi/scipy,rmcgibbo/scipy,dominicelse/scipy,hainm/scipy,pnedunuri/scipy,jonycgn/scipy,WarrenWeckesser/scipy,vanpact/scipy,mtrbean/scipy,Shaswat27/scipy,gertingold/scipy,dominicelse/scipy,zaxliu/scipy,woodscn/scipy,surhudm/scipy,jonycgn/scipy,jor-/scipy,apbard/scipy,Eric89GXL/scipy,gdooper/scipy,FRidh/scipy,FRidh/scipy,ilayn/scipy,grlee77/scipy,bkendzior/scipy,kleskjr/scipy,pyramania/scipy,pizzathief/scipy,mortada/scipy,raoulbq/scipy,e-q/scipy,scipy/scipy,kleskjr/scipy,jamestwebber/scipy,mdhaber/scipy,WillieMaddox/scipy,pbrod/scipy,gef756/scipy,andyfaff/scipy,lukauskas/scipy,lukauskas/scipy,haudren/scipy,tylerjereddy/scipy,scipy/scipy,jonycgn/scipy,kleskjr/scipy,juliantaylor/scipy,aarchiba/scipy,anntzer/scipy,jsilter/scipy,zxsted/scipy,witcxc/scipy,apbard/scipy,maniteja123/scipy,trankmichael/scipy,newemailjdm/scipy,nonhermitian/scipy,Gillu13/scipy,anielsen001/scipy,pnedunuri/scipy,jonycgn/scipy,sriki18/scipy,niknow/scipy,ogrisel/scipy,perimosocordiae/scipy,felipebetancur/scipy,mhogg/scipy,sauliusl/scipy,maciejkula/scipy,woodscn/scipy,pyramania/scipy,scipy/scipy,maniteja123/scipy,zxsted/scipy,mortonjt/scipy,richardotis/scipy,gdooper/scipy,WillieMaddox/scipy,Shaswat27/scipy,Newman101/scipy,kalvdans/scipy,futurulus/scipy,scipy/scipy,Srisai85/scipy,fernand/scipy,giorgiop/scipy,lhilt/scipy,pbrod/scipy,anielsen001/scipy,hainm/scipy,grlee77/scipy,sauliusl/scipy,Dapid/scipy,Shaswat27/scipy,raoulbq/scipy,jonycgn/scipy,aeklant/scipy,chatcannon/scipy,mgaitan/scipy,witcxc/scipy,e-q/scipy,rgommers/scipy,dominicelse/scipy,tylerjereddy/scipy,ales-erjavec/scipy,sonnyhu/scipy,ortylp/scipy,aarchiba/scipy,fredrikw/scipy,hainm/scipy,nonhermitian/scipy,scipy/scipy,gfyoung/scipy,zerothi/scipy,dominicelse/scipy,mortada/scipy,vigna/scipy,FRidh/scipy,nvoron23/scipy,pyramania/scipy,jakevdp/scipy,haudren/scipy,teoliphant/scipy,grlee77/scipy,Gillu13/scipy,zaxliu/scipy,Newman101/scipy,mortonjt/scipy,perimosocordiae/scipy,jseabold/scipy,chatcannon/scipy,nvoron23/scipy,behzadnouri/scipy,mortonjt/scipy,mikebenfield/scipy,anntzer/scipy,aarchiba/scipy,maciejkula/scipy,anielsen001/scipy,WillieMaddox/scipy,vhaasteren/scipy,matthewalbani/scipy,cpaulik/scipy,petebachant/scipy,perimosocordiae/scipy,pschella/scipy,newemailjdm/scipy,ilayn/scipy,fernand/scipy,newemailjdm/scipy,vhaasteren/scipy,josephcslater/scipy,feli
pebetancur/scipy,gdooper/scipy,efiring/scipy,ortylp/scipy,gfyoung/scipy,juliantaylor/scipy,anielsen001/scipy,endolith/scipy,raoulbq/scipy,aman-iitj/scipy,Kamp9/scipy,nmayorov/scipy,befelix/scipy,mdhaber/scipy,arokem/scipy,sargas/scipy,ChanderG/scipy,dominicelse/scipy,e-q/scipy,dch312/scipy,andyfaff/scipy,gdooper/scipy,richardotis/scipy,pbrod/scipy,piyush0609/scipy,petebachant/scipy,vberaudi/scipy,Dapid/scipy,e-q/scipy,sargas/scipy,jseabold/scipy,giorgiop/scipy,endolith/scipy,futurulus/scipy,juliantaylor/scipy,matthewalbani/scipy,vberaudi/scipy,ogrisel/scipy,rmcgibbo/scipy,cpaulik/scipy,person142/scipy,argriffing/scipy,maniteja123/scipy,chatcannon/scipy,sauliusl/scipy,vberaudi/scipy,gef756/scipy,sonnyhu/scipy,vigna/scipy,endolith/scipy,mgaitan/scipy,aman-iitj/scipy,befelix/scipy,trankmichael/scipy,cpaulik/scipy,Srisai85/scipy,surhudm/scipy,zaxliu/scipy,Newman101/scipy,maciejkula/scipy,mingwpy/scipy,sargas/scipy,Kamp9/scipy,matthew-brett/scipy,pschella/scipy,mortada/scipy,mhogg/scipy,apbard/scipy,minhlongdo/scipy,piyush0609/scipy,vigna/scipy,sriki18/scipy,gef756/scipy,nmayorov/scipy,ndchorley/scipy,zxsted/scipy,woodscn/scipy,kleskjr/scipy,bkendzior/scipy,zerothi/scipy,jakevdp/scipy,matthew-brett/scipy,mhogg/scipy,argriffing/scipy,jor-/scipy,mtrbean/scipy,mgaitan/scipy,apbard/scipy,felipebetancur/scipy,mingwpy/scipy,Kamp9/scipy,Stefan-Endres/scipy,nonhermitian/scipy,argriffing/scipy,nvoron23/scipy,aman-iitj/scipy,haudren/scipy,nvoron23/scipy,sargas/scipy,giorgiop/scipy,fredrikw/scipy,andim/scipy,ndchorley/scipy,josephcslater/scipy,mtrbean/scipy,kleskjr/scipy,Stefan-Endres/scipy,pschella/scipy,mingwpy/scipy,cpaulik/scipy,larsmans/scipy,kalvdans/scipy,endolith/scipy,rgommers/scipy,jor-/scipy,trankmichael/scipy,jor-/scipy,mgaitan/scipy,surhudm/scipy,chatcannon/scipy,larsmans/scipy,pizzathief/scipy,Kamp9/scipy,ndchorley/scipy,Eric89GXL/scipy,petebachant/scipy,ChanderG/scipy,vigna/scipy,rgommers/scipy,lukauskas/scipy,arokem/scipy,minhlongdo/scipy,WarrenWeckesser/scipy,mortonjt/scipy,mikebenfield/scipy,sauliusl/scipy,sargas/scipy,vhaasteren/scipy,lhilt/scipy,Srisai85/scipy,ogrisel/scipy,andyfaff/scipy,ChanderG/scipy,juliantaylor/scipy,behzadnouri/scipy,jjhelmus/scipy,sonnyhu/scipy,nmayorov/scipy,kalvdans/scipy,richardotis/scipy,Eric89GXL/scipy,niknow/scipy,Eric89GXL/scipy,jamestwebber/scipy,jsilter/scipy,pizzathief/scipy,anntzer/scipy,anntzer/scipy,hainm/scipy,aeklant/scipy,Dapid/scipy,anielsen001/scipy,gef756/scipy,lhilt/scipy,tylerjereddy/scipy,vberaudi/scipy,matthew-brett/scipy,newemailjdm/scipy,arokem/scipy,teoliphant/scipy,gdooper/scipy,piyush0609/scipy,mgaitan/scipy,sriki18/scipy,fernand/scipy,minhlongdo/scipy,vanpact/scipy,grlee77/scipy,niknow/scipy,pyramania/scipy,petebachant/scipy,mdhaber/scipy,ortylp/scipy,zaxliu/scipy,bkendzior/scipy,matthew-brett/scipy,trankmichael/scipy,cpaulik/scipy,aeklant/scipy,rmcgibbo/scipy,Newman101/scipy,lukauskas/scipy,felipebetancur/scipy,lukauskas/scipy,rgommers/scipy,mgaitan/scipy,haudren/scipy,jamestwebber/scipy,mtrbean/scipy,anntzer/scipy,aeklant/scipy,aeklant/scipy,jakevdp/scipy,efiring/scipy,njwilson23/scipy,juliantaylor/scipy,mortonjt/scipy,vhaasteren/scipy,mtrbean/scipy,larsmans/scipy,ndchorley/scipy,lhilt/scipy,Gillu13/scipy,Eric89GXL/scipy,Shaswat27/scipy,Eric89GXL/scipy,giorgiop/scipy,niknow/scipy,dch312/scipy,njwilson23/scipy,njwilson23/scipy,fredrikw/scipy,Gillu13/scipy,maniteja123/scipy,ChanderG/scipy,futurulus/scipy,ales-erjavec/scipy,FRidh/scipy,sriki18/scipy,newemailjdm/scipy,teoliphant/scipy,mortonjt/scipy,josephcslater/scipy,hainm/scipy,jsilt
er/scipy,aman-iitj/scipy,vanpact/scipy,matthewalbani/scipy,giorgiop/scipy,befelix/scipy,andim/scipy,gertingold/scipy,jseabold/scipy,niknow/scipy,befelix/scipy,zxsted/scipy,futurulus/scipy,endolith/scipy,sauliusl/scipy,Shaswat27/scipy,fernand/scipy,mingwpy/scipy,dch312/scipy,ogrisel/scipy,mingwpy/scipy,mikebenfield/scipy,trankmichael/scipy,mdhaber/scipy,matthewalbani/scipy,Stefan-Endres/scipy,pnedunuri/scipy,pschella/scipy,lhilt/scipy,teoliphant/scipy,njwilson23/scipy,dch312/scipy,ndchorley/scipy,Dapid/scipy,WillieMaddox/scipy,mhogg/scipy,efiring/scipy,ndchorley/scipy,gertingold/scipy,matthewalbani/scipy,newemailjdm/scipy,jamestwebber/scipy,rgommers/scipy,scipy/scipy,ales-erjavec/scipy,fernand/scipy,ogrisel/scipy,pnedunuri/scipy,Newman101/scipy,argriffing/scipy,sonnyhu/scipy,aman-iitj/scipy,woodscn/scipy,ilayn/scipy,bkendzior/scipy,larsmans/scipy,niknow/scipy,kalvdans/scipy,josephcslater/scipy,jjhelmus/scipy,ales-erjavec/scipy,felipebetancur/scipy,maniteja123/scipy,mortada/scipy,WarrenWeckesser/scipy,futurulus/scipy,zaxliu/scipy,maciejkula/scipy,gfyoung/scipy,Stefan-Endres/scipy,ales-erjavec/scipy,haudren/scipy,perimosocordiae/scipy,petebachant/scipy,sriki18/scipy,nmayorov/scipy,witcxc/scipy,josephcslater/scipy,fredrikw/scipy,sonnyhu/scipy,sauliusl/scipy,raoulbq/scipy,fernand/scipy,richardotis/scipy,zerothi/scipy,WarrenWeckesser/scipy,vhaasteren/scipy,mdhaber/scipy,bkendzior/scipy,Srisai85/scipy,ChanderG/scipy,Kamp9/scipy,gertingold/scipy,piyush0609/scipy,ilayn/scipy,vanpact/scipy,giorgiop/scipy,WarrenWeckesser/scipy,vanpact/scipy,njwilson23/scipy,rmcgibbo/scipy,gef756/scipy,felipebetancur/scipy,person142/scipy,Dapid/scipy,WillieMaddox/scipy,efiring/scipy,ilayn/scipy,jjhelmus/scipy,piyush0609/scipy,ortylp/scipy,andim/scipy,arokem/scipy,anielsen001/scipy,gfyoung/scipy,witcxc/scipy,Newman101/scipy,befelix/scipy,andim/scipy,pizzathief/scipy,Gillu13/scipy,Kamp9/scipy,person142/scipy,WarrenWeckesser/scipy,gertingold/scipy,Stefan-Endres/scipy,efiring/scipy,nvoron23/scipy,minhlongdo/scipy,cpaulik/scipy,hainm/scipy,richardotis/scipy,Dapid/scipy,futurulus/scipy,ortylp/scipy,jjhelmus/scipy,mikebenfield/scipy,mhogg/scipy,person142/scipy,sonnyhu/scipy,larsmans/scipy,nvoron23/scipy,Gillu13/scipy,kalvdans/scipy,aarchiba/scipy,nonhermitian/scipy,Stefan-Endres/scipy,kleskjr/scipy,petebachant/scipy,rmcgibbo/scipy,trankmichael/scipy,maciejkula/scipy,perimosocordiae/scipy,raoulbq/scipy,jseabold/scipy,rmcgibbo/scipy,Srisai85/scipy,andim/scipy,pbrod/scipy,andyfaff/scipy,zxsted/scipy,Shaswat27/scipy,sriki18/scipy,larsmans/scipy,jseabold/scipy,jonycgn/scipy,maniteja123/scipy,fredrikw/scipy,surhudm/scipy,teoliphant/scipy,tylerjereddy/scipy
# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq Add missing test definition in scipy.cluster
# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq from numpy.testing import NumpyTest test = NumpyTest().test
<commit_before># # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq <commit_msg>Add missing test definition in scipy.cluster<commit_after>
# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq from numpy.testing import NumpyTest test = NumpyTest().test
# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq Add missing test definition in scipy.cluster# # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq from numpy.testing import NumpyTest test = NumpyTest().test
<commit_before># # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq <commit_msg>Add missing test definition in scipy.cluster<commit_after># # cluster - Vector Quantization / Kmeans # from info import __doc__ __all__ = ['vq'] import vq from numpy.testing import NumpyTest test = NumpyTest().test
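A minimal sketch (not part of the commit) of what the added attribute enables: with `test = NumpyTest().test` defined at package level, the suite can be run the way other scipy subpackages expose it. Note that `NumpyTest` belongs to the old `numpy.testing` framework and no longer exists in modern NumPy.

```python
# Illustrative only: assumes the old NumpyTest-based framework is installed.
import scipy.cluster

# With `test = NumpyTest().test` defined in scipy/cluster/__init__.py,
# callers can run the package's test suite directly:
scipy.cluster.test()
```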
0b1813bef37819209ed9fb5b06eb7495d0e0e1fb
netmiko/arista/arista_ssh.py
netmiko/arista/arista_ssh.py
from netmiko.ssh_connection import SSHConnection


class AristaSSH(SSHConnection):
    pass
import time

from netmiko.ssh_connection import SSHConnection


class AristaSSH(SSHConnection):
    def special_login_handler(self, delay_factor=1):
        """
        Arista adds a "Last login: " message that doesn't always
        have sufficient time to be handled
        """
        time.sleep(3 * delay_factor)
        self.clear_buffer()
Improve Arista reliability on slow login
Improve Arista reliability on slow login
Python
mit
fooelisa/netmiko,ktbyers/netmiko,shamanu4/netmiko,ktbyers/netmiko,shamanu4/netmiko,shsingh/netmiko,shsingh/netmiko,isidroamv/netmiko,fooelisa/netmiko,isidroamv/netmiko
from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): pass Improve Arista reliability on slow login
import time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer()
<commit_before>from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): pass <commit_msg>Improve Arista reliability on slow login<commit_after>
import time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer()
from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): pass Improve Arista reliability on slow loginimport time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer()
<commit_before>from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): pass <commit_msg>Improve Arista reliability on slow login<commit_after>import time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer()
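A hedged usage sketch of how the overridden `special_login_handler` comes into play: it runs automatically during session setup, so callers only pick the Arista device type. The host and credentials below are placeholders, not values from the commit.

```python
# Hypothetical usage: connect to an Arista switch with netmiko.
from netmiko import ConnectHandler

device = {
    "device_type": "arista_eos",   # maps to the AristaSSH class above
    "ip": "192.0.2.10",            # placeholder address
    "username": "admin",           # placeholder credentials
    "password": "secret",
}

conn = ConnectHandler(**device)    # special_login_handler() pauses and
                                   # clears the "Last login:" banner here
print(conn.send_command("show version"))
conn.disconnect()
```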
c97153e9d91af27713afce506bc658daa6b1a0e2
docs/manual/docsmanage.py
docs/manual/docsmanage.py
#!/usr/bin/env python
import os
import sys

sys.path.insert(0, os.path.join(__file__, "..", ".."))
sys.path.insert(0, os.path.dirname(__file__))

from reviewboard import settings
from django.core.management import execute_manager

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')


def scan_resource(resource):
    for child in resource.item_child_resources:
        scan_resource(child)

    for child in resource.list_child_resources:
        scan_resource(child)


if __name__ == "__main__":
    execute_manager(settings)
#!/usr/bin/env python
import os
import sys

sys.path.insert(0, os.path.join(__file__, "..", ".."))
sys.path.insert(0, os.path.dirname(__file__))

from reviewboard import settings
from django.core.management import execute_from_command_line

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')


def scan_resource(resource):
    for child in resource.item_child_resources:
        scan_resource(child)

    for child in resource.list_child_resources:
        scan_resource(child)


if __name__ == "__main__":
    execute_from_command_line()
Fix building the manual with Django 1.6.
Fix building the manual with Django 1.6. This is a trivial change that just switches from calling execute_manager to execute_from_command_line, in order to build again on Django 1.6.
Python
mit
chipx86/reviewboard,custode/reviewboard,KnowNo/reviewboard,bkochendorfer/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,sgallagher/reviewboard,bkochendorfer/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,davidt/reviewboard,brennie/reviewboard,KnowNo/reviewboard,custode/reviewboard,1tush/reviewboard,KnowNo/reviewboard,1tush/reviewboard,davidt/reviewboard,1tush/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,KnowNo/reviewboard,brennie/reviewboard,davidt/reviewboard,beol/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,1tush/reviewboard,custode/reviewboard,1tush/reviewboard,brennie/reviewboard,beol/reviewboard,1tush/reviewboard,davidt/reviewboard,sgallagher/reviewboard,chipx86/reviewboard,custode/reviewboard,1tush/reviewboard,reviewboard/reviewboard,brennie/reviewboard,beol/reviewboard,1tush/reviewboard,sgallagher/reviewboard
#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_manager os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_manager(settings) Fix building the manual with Django 1.6. This is a trivial change that just switches from calling execute_manager to execute_from_command_line, in order to build again on Django 1.6.
#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_from_command_line os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_from_command_line()
<commit_before>#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_manager os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_manager(settings) <commit_msg>Fix building the manual with Django 1.6. This is a trivial change that just switches from calling execute_manager to execute_from_command_line, in order to build again on Django 1.6.<commit_after>
#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_from_command_line os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_from_command_line()
#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_manager os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_manager(settings) Fix building the manual with Django 1.6. This is a trivial change that just switches from calling execute_manager to execute_from_command_line, in order to build again on Django 1.6.#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_from_command_line os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_from_command_line()
<commit_before>#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_manager os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_manager(settings) <commit_msg>Fix building the manual with Django 1.6. This is a trivial change that just switches from calling execute_manager to execute_from_command_line, in order to build again on Django 1.6.<commit_after>#!/usr/bin/env python import os import sys sys.path.insert(0, os.path.join(__file__, "..", "..")) sys.path.insert(0, os.path.dirname(__file__)) from reviewboard import settings from django.core.management import execute_from_command_line os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings') def scan_resource(resource): for child in resource.item_child_resources: scan_resource(child) for child in resource.list_child_resources: scan_resource(child) if __name__ == "__main__": execute_from_command_line()
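The message notes that `execute_manager` was removed in Django 1.6. A minimal sketch of the replacement pattern, mirroring a stock `manage.py` rather than the docs script itself:

```python
# Django >= 1.6 entry-point pattern the commit switches to.
import os
import sys

from django.core.management import execute_from_command_line

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "reviewboard.settings")
    # execute_from_command_line reads argv itself; no settings module object
    # needs to be passed, unlike the removed execute_manager(settings).
    execute_from_command_line(sys.argv)
```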
e86d2338daa67b2d5e84b62d15b44b0a897b9c93
dimod/package_info.py
dimod/package_info.py
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================

__version__ = '0.9.0.dev4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================

__version__ = '0.9.0.dev5'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.9.0.dev4 -> 0.9.0.dev5
Update version 0.9.0.dev4 -> 0.9.0.dev5

Changes
-------
* `BinaryQuadraticModel` now subclasses `AdjDictBQM`
* Remove `BinaryQuadraticModel.to_coo` and `BinaryQuadraticModel.from_coo`
* Remove `BinaryQuadraticModel.to_numpy_matrix` and `BinaryQuadraticModel.from_numpy_matrix`
* Remove `BinaryQuadraticModel.SPIN` and `BinaryQuadraticModel.BINARY`
* Remove `vartype` keyword argument from `BinaryQuadraticModel.add_variable`, `BinaryQuadraticModel.add_variables`, `BinaryQuadraticModel.add_interaction` and `BinaryQuadraticModel.add_interactions`
* The `.spin` and `.binary` properties of binary quadratic models are now views rather than copies

New Features
------------
* `as_bqm` function for constructing binary quadratic models
* Most `BinaryQuadraticModel` methods and properties have been added to `AdjArrayBQM`, `AdjDictBQM`, `AdjMapBQM` and `AdjVectorBQM`
Python
apache-2.0
dwavesystems/dimod,dwavesystems/dimod
# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev4' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.' Update version 0.9.0.dev4 -> 0.9.0.dev5 Changes ------- * `BinaryQuadraticModel` now subclasses `AdjDictBQM` * Remove `BinaryQuadraticModel.to_coo` and `BinaryQuadraticModel.from_coo` * Remove `BinaryQuadraticModel.to_numpy_matrix` and `BinaryQuadraticModel.from_numpy_matrix` * Remove `BinaryQuadraticModel.SPIN` and `BinaryQuadraticModel.BINARY` * Remove `vartype` keyword argument from `BinaryQuadraticModel.add_variable`, `BinaryQuadraticModel.add_variables`, `BinaryQuadraticModel.add_interaction` and `BinaryQuadraticModel.add_interactions` * The `.spin` and `.binary` properties of binary quadratic models are now views rather than copies New Features ------------ * `as_bqm` function for constructing binary quadratic models * Most `BinaryQuadraticModel` methods and properties have been added to `AdjArrayBQM`, `AdjDictBQM`, `AdjMapBQM` and `AdjVectorBQM`
# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev5' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
<commit_before># Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev4' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.' <commit_msg>Update version 0.9.0.dev4 -> 0.9.0.dev5 Changes ------- * `BinaryQuadraticModel` now subclasses `AdjDictBQM` * Remove `BinaryQuadraticModel.to_coo` and `BinaryQuadraticModel.from_coo` * Remove `BinaryQuadraticModel.to_numpy_matrix` and `BinaryQuadraticModel.from_numpy_matrix` * Remove `BinaryQuadraticModel.SPIN` and `BinaryQuadraticModel.BINARY` * Remove `vartype` keyword argument from `BinaryQuadraticModel.add_variable`, `BinaryQuadraticModel.add_variables`, `BinaryQuadraticModel.add_interaction` and `BinaryQuadraticModel.add_interactions` * The `.spin` and `.binary` properties of binary quadratic models are now views rather than copies New Features ------------ * `as_bqm` function for constructing binary quadratic models * Most `BinaryQuadraticModel` methods and properties have been added to `AdjArrayBQM`, `AdjDictBQM`, `AdjMapBQM` and `AdjVectorBQM`<commit_after>
# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev5' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev4' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.' Update version 0.9.0.dev4 -> 0.9.0.dev5 Changes ------- * `BinaryQuadraticModel` now subclasses `AdjDictBQM` * Remove `BinaryQuadraticModel.to_coo` and `BinaryQuadraticModel.from_coo` * Remove `BinaryQuadraticModel.to_numpy_matrix` and `BinaryQuadraticModel.from_numpy_matrix` * Remove `BinaryQuadraticModel.SPIN` and `BinaryQuadraticModel.BINARY` * Remove `vartype` keyword argument from `BinaryQuadraticModel.add_variable`, `BinaryQuadraticModel.add_variables`, `BinaryQuadraticModel.add_interaction` and `BinaryQuadraticModel.add_interactions` * The `.spin` and `.binary` properties of binary quadratic models are now views rather than copies New Features ------------ * `as_bqm` function for constructing binary quadratic models * Most `BinaryQuadraticModel` methods and properties have been added to `AdjArrayBQM`, `AdjDictBQM`, `AdjMapBQM` and `AdjVectorBQM`# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev5' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
<commit_before># Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev4' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.' <commit_msg>Update version 0.9.0.dev4 -> 0.9.0.dev5 Changes ------- * `BinaryQuadraticModel` now subclasses `AdjDictBQM` * Remove `BinaryQuadraticModel.to_coo` and `BinaryQuadraticModel.from_coo` * Remove `BinaryQuadraticModel.to_numpy_matrix` and `BinaryQuadraticModel.from_numpy_matrix` * Remove `BinaryQuadraticModel.SPIN` and `BinaryQuadraticModel.BINARY` * Remove `vartype` keyword argument from `BinaryQuadraticModel.add_variable`, `BinaryQuadraticModel.add_variables`, `BinaryQuadraticModel.add_interaction` and `BinaryQuadraticModel.add_interactions` * The `.spin` and `.binary` properties of binary quadratic models are now views rather than copies New Features ------------ * `as_bqm` function for constructing binary quadratic models * Most `BinaryQuadraticModel` methods and properties have been added to `AdjArrayBQM`, `AdjDictBQM`, `AdjMapBQM` and `AdjVectorBQM`<commit_after># Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ================================================================================================ __version__ = '0.9.0.dev5' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
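A hedged sketch of the API the changelog above describes; the bias values are arbitrary and the exact 0.9.0.dev5 behaviour is assumed, not verified.

```python
import dimod

# Build a small spin-valued binary quadratic model.
bqm = dimod.BinaryQuadraticModel({"a": -1.0, "b": 1.0},   # linear biases
                                 {("a", "b"): 0.5},        # quadratic bias
                                 0.0,                      # offset
                                 dimod.SPIN)

# Per the changelog, .binary and .spin are now views rather than copies, so
# they track the underlying model instead of snapshotting it.
binary_view = bqm.binary
print(binary_view.linear)
```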
d4f73eeccd30d884d0bd8a52ad1798b6f8a1366d
test/test-git-sap-lib.py
test/test-git-sap-lib.py
from git.repo.base import Repo


def test_open_repo():
    assert len(Repo().branches) > 0
from git.repo.base import Repo

from unittest import TestCase


class TestSequenceFunctions(TestCase):
    def test_open_repo(self):
        self.assertTrue(len(Repo().branches) > 0)
Switch over to pyunit from py.test
Switch over to pyunit from py.test
Python
apache-2.0
Yasumoto/sapling,jsirois/sapling,jsirois/sapling,Yasumoto/sapling
from git.repo.base import Repo def test_open_repo(): assert len(Repo().branches) > 0 Switch over to pyunit from py.test
from git.repo.base import Repo from unittest import TestCase class TestSequenceFunctions(TestCase): def test_open_repo(self): self.assertTrue(len(Repo().branches) > 0)
<commit_before>from git.repo.base import Repo def test_open_repo(): assert len(Repo().branches) > 0 <commit_msg>Switch over to pyunit from py.test<commit_after>
from git.repo.base import Repo from unittest import TestCase class TestSequenceFunctions(TestCase): def test_open_repo(self): self.assertTrue(len(Repo().branches) > 0)
from git.repo.base import Repo def test_open_repo(): assert len(Repo().branches) > 0 Switch over to pyunit from py.testfrom git.repo.base import Repo from unittest import TestCase class TestSequenceFunctions(TestCase): def test_open_repo(self): self.assertTrue(len(Repo().branches) > 0)
<commit_before>from git.repo.base import Repo def test_open_repo(): assert len(Repo().branches) > 0 <commit_msg>Switch over to pyunit from py.test<commit_after>from git.repo.base import Repo from unittest import TestCase class TestSequenceFunctions(TestCase): def test_open_repo(self): self.assertTrue(len(Repo().branches) > 0)
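An illustrative addition (not in the commit): a unittest-style module is usually made self-running so it can be executed directly without py.test.

```python
# Sketch of the converted test module with a pyunit entry point added,
# so `python test/test-git-sap-lib.py` runs it without py.test.
import unittest

from git.repo.base import Repo


class TestSequenceFunctions(unittest.TestCase):
    def test_open_repo(self):
        # Same assertion as the converted test above.
        self.assertTrue(len(Repo().branches) > 0)


if __name__ == "__main__":
    unittest.main()
```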
2124f859a74d4a3e00524af62d1122796804f048
test_utils/assertions.py
test_utils/assertions.py
""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list, dict)): first = [pformat(d) for d in first] else: first = [pformat(first)] if isinstance(second, (tuple, list, dict)): second = [pformat(d) for d in second] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg
""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list)): first = [pformat(d) for d in first] elif isinstance(first, dict): first = ["%s:%s" % (pformat(key), pformat(val)) for key,val in first.iteritems()] else: first = [pformat(first)] if isinstance(second, (tuple, list)): second = [pformat(d) for d in second] elif isinstance(second, dict): second = ["%s:%s" % (pformat(key), pformat(val)) for key,val in second.iteritems()] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return '\n' + ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg
Make failIfDiff work with dict keys and values.
Make failIfDiff work with dict keys and values.
Python
mit
frac/django-test-utils,ericholscher/django-test-utils,frac/django-test-utils,acdha/django-test-utils,acdha/django-test-utils,ericholscher/django-test-utils
""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list, dict)): first = [pformat(d) for d in first] else: first = [pformat(first)] if isinstance(second, (tuple, list, dict)): second = [pformat(d) for d in second] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg Make failIfDiff work with dict keys and values.
""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list)): first = [pformat(d) for d in first] elif isinstance(first, dict): first = ["%s:%s" % (pformat(key), pformat(val)) for key,val in first.iteritems()] else: first = [pformat(first)] if isinstance(second, (tuple, list)): second = [pformat(d) for d in second] elif isinstance(second, dict): second = ["%s:%s" % (pformat(key), pformat(val)) for key,val in second.iteritems()] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return '\n' + ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg
<commit_before>""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list, dict)): first = [pformat(d) for d in first] else: first = [pformat(first)] if isinstance(second, (tuple, list, dict)): second = [pformat(d) for d in second] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg <commit_msg>Make failIfDiff work with dict keys and values.<commit_after>
""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list)): first = [pformat(d) for d in first] elif isinstance(first, dict): first = ["%s:%s" % (pformat(key), pformat(val)) for key,val in first.iteritems()] else: first = [pformat(first)] if isinstance(second, (tuple, list)): second = [pformat(d) for d in second] elif isinstance(second, dict): second = ["%s:%s" % (pformat(key), pformat(val)) for key,val in second.iteritems()] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return '\n' + ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg
""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list, dict)): first = [pformat(d) for d in first] else: first = [pformat(first)] if isinstance(second, (tuple, list, dict)): second = [pformat(d) for d in second] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg Make failIfDiff work with dict keys and values.""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list)): first = [pformat(d) for d in first] elif isinstance(first, dict): first = ["%s:%s" % (pformat(key), pformat(val)) for key,val in first.iteritems()] else: first = [pformat(first)] if isinstance(second, (tuple, list)): second = [pformat(d) for d in second] elif isinstance(second, dict): second = ["%s:%s" % (pformat(key), pformat(val)) for key,val in second.iteritems()] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return '\n' + ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg
<commit_before>""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list, dict)): first = [pformat(d) for d in first] else: first = [pformat(first)] if isinstance(second, (tuple, list, dict)): second = [pformat(d) for d in second] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg <commit_msg>Make failIfDiff work with dict keys and values.<commit_after>""" Code originally from: http://www.aminus.org/blogs/index.php/2009/01/09/assertnodiff """ import difflib from pprint import pformat class DiffTestCaseMixin(object): def get_diff_msg(self, first, second, fromfile='First', tofile='Second'): """Return a unified diff between first and second.""" # Force inputs to iterables for diffing. # use pformat instead of str or repr to output dicts and such # in a stable order for comparison. if isinstance(first, (tuple, list)): first = [pformat(d) for d in first] elif isinstance(first, dict): first = ["%s:%s" % (pformat(key), pformat(val)) for key,val in first.iteritems()] else: first = [pformat(first)] if isinstance(second, (tuple, list)): second = [pformat(d) for d in second] elif isinstance(second, dict): second = ["%s:%s" % (pformat(key), pformat(val)) for key,val in second.iteritems()] else: second = [pformat(second)] diff = difflib.unified_diff( first, second, fromfile=fromfile, tofile=tofile) # Add line endings. return '\n' + ''.join([d + '\n' for d in diff]) def failIfDiff(self, first, second, fromfile='First', tofile='Second'): """If not first == second, fail with a unified diff.""" if not first == second: msg = self.get_diff_msg(first, second, fromfile, tofile) raise self.failureException, msg
543cfa32d82417efe63f38a20037105b7e313e5d
cms/project_template/project_name/settings/local.py
cms/project_template/project_name/settings/local.py
""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_PORT = '2525' EMAIL_USE_TLS = True
""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '178288370161874a6' EMAIL_HOST_PASSWORD = '5033a6d5bca3f0' EMAIL_PORT = '2525' EMAIL_USE_TLS = True
Update default testing SMTP settings in project template
Update default testing SMTP settings in project template
Python
bsd-3-clause
lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,danielsamuels/cms,lewiscollard/cms,dan-gamble/cms,danielsamuels/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,jamesfoley/cms,danielsamuels/cms
""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_PORT = '2525' EMAIL_USE_TLS = True Update default testing SMTP settings in project template
""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '178288370161874a6' EMAIL_HOST_PASSWORD = '5033a6d5bca3f0' EMAIL_PORT = '2525' EMAIL_USE_TLS = True
<commit_before>""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_PORT = '2525' EMAIL_USE_TLS = True <commit_msg>Update default testing SMTP settings in project template<commit_after>
""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '178288370161874a6' EMAIL_HOST_PASSWORD = '5033a6d5bca3f0' EMAIL_PORT = '2525' EMAIL_USE_TLS = True
""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_PORT = '2525' EMAIL_USE_TLS = True Update default testing SMTP settings in project template""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '178288370161874a6' EMAIL_HOST_PASSWORD = '5033a6d5bca3f0' EMAIL_PORT = '2525' EMAIL_USE_TLS = True
<commit_before>""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_PORT = '2525' EMAIL_USE_TLS = True <commit_msg>Update default testing SMTP settings in project template<commit_after>""" Settings for local development. These settings are not fast or efficient, but allow local servers to be run using the django-admin.py utility. This file should be excluded from version control to keep the settings local. """ import os import os.path from .base import * # Run in debug mode. DEBUG = True TEMPLATE_DEBUG = DEBUG # Save media files to the user's Sites folder. MEDIA_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "media")) STATIC_ROOT = os.path.expanduser(os.path.join("~/Sites", SITE_DOMAIN, "static")) # Use local server. SITE_DOMAIN = "localhost:8000" PREPEND_WWW = False # Disable the template cache for development. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) # Optional separate database settings DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "HOST": "localhost", "NAME": "{{ project_name }}", "USER": os.getlogin(), "PASSWORD": "", }, } # Mailtrip SMTP EMAIL_HOST = 'mailtrap.io' EMAIL_HOST_USER = '178288370161874a6' EMAIL_HOST_PASSWORD = '5033a6d5bca3f0' EMAIL_PORT = '2525' EMAIL_USE_TLS = True
6e013558940671257cd21972d755564faba38c5c
src/sentry/web/frontend/generic.py
src/sentry/web/frontend/generic.py
""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.eot', '.ttf', '.woff', '.js')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs )
""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.js' '.ttf', '.ttc', '.otf', '.eot', '.woff', '.woff2')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs )
Cover more font extensions for CORS from static media
Cover more font extensions for CORS from static media We were missing woff2, but this should just future proof us if more are added.
Python
bsd-3-clause
mvaled/sentry,looker/sentry,ifduyue/sentry,jean/sentry,BuildingLink/sentry,mvaled/sentry,alexm92/sentry,ifduyue/sentry,fotinakis/sentry,JackDanger/sentry,gencer/sentry,jean/sentry,gencer/sentry,alexm92/sentry,zenefits/sentry,nicholasserra/sentry,jean/sentry,mvaled/sentry,fotinakis/sentry,ifduyue/sentry,jean/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,BuildingLink/sentry,gencer/sentry,fotinakis/sentry,looker/sentry,mitsuhiko/sentry,mitsuhiko/sentry,beeftornado/sentry,looker/sentry,mvaled/sentry,nicholasserra/sentry,JamesMura/sentry,looker/sentry,alexm92/sentry,BuildingLink/sentry,gencer/sentry,daevaorn/sentry,JamesMura/sentry,jean/sentry,ifduyue/sentry,fotinakis/sentry,beeftornado/sentry,JackDanger/sentry,ifduyue/sentry,JamesMura/sentry,zenefits/sentry,daevaorn/sentry,nicholasserra/sentry,mvaled/sentry,gencer/sentry,BuildingLink/sentry,daevaorn/sentry,daevaorn/sentry,zenefits/sentry,JamesMura/sentry,zenefits/sentry,looker/sentry,mvaled/sentry,zenefits/sentry,JackDanger/sentry
""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.eot', '.ttf', '.woff', '.js')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs ) Cover more font extensions for CORS from static media We were missing woff2, but this should just future proof us if more are added.
""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.js' '.ttf', '.ttc', '.otf', '.eot', '.woff', '.woff2')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs )
<commit_before>""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.eot', '.ttf', '.woff', '.js')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs ) <commit_msg>Cover more font extensions for CORS from static media We were missing woff2, but this should just future proof us if more are added.<commit_after>
""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.js' '.ttf', '.ttc', '.otf', '.eot', '.woff', '.woff2')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs )
""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.eot', '.ttf', '.woff', '.js')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs ) Cover more font extensions for CORS from static media We were missing woff2, but this should just future proof us if more are added.""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.js' '.ttf', '.ttc', '.otf', '.eot', '.woff', '.woff2')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs )
<commit_before>""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.eot', '.ttf', '.woff', '.js')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs ) <commit_msg>Cover more font extensions for CORS from static media We were missing woff2, but this should just future proof us if more are added.<commit_after>""" sentry.web.frontend.generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.views.generic import TemplateView as BaseTemplateView from sentry.web.helpers import render_to_response def static_media(request, **kwargs): """ Serve static files below a given point in the directory structure. """ from django.contrib.staticfiles.views import serve module = kwargs.get('module') path = kwargs.get('path', '') version = kwargs.get('version') if module: path = '%s/%s' % (module, path) response = serve(request, path, insecure=True) # We need CORS for font files if path.endswith(('.js' '.ttf', '.ttc', '.otf', '.eot', '.woff', '.woff2')): response['Access-Control-Allow-Origin'] = '*' # If we have a version, we can cache it FOREVER if version is not None: response['Cache-Control'] = 'max-age=315360000' return response class TemplateView(BaseTemplateView): def render_to_response(self, context, **response_kwargs): return render_to_response( request=self.request, template=self.get_template_names(), context=context, **response_kwargs )
06a8567ab538e9de510d34a686dabade8f6a8dc9
base/components/correlations/managers.py
base/components/correlations/managers.py
# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday)
# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs #.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday)
Remove the blind addition of prefech_related() to all Correlation querysets.
Remove the blind addition of prefech_related() to all Correlation querysets.
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday) Remove the blind addition of prefech_related() to all Correlation querysets.
# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs #.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday)
<commit_before># -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday) <commit_msg>Remove the blind addition of prefech_related() to all Correlation querysets.<commit_after>
# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs #.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday)
# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday) Remove the blind addition of prefech_related() to all Correlation querysets.# -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs #.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday)
<commit_before># -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday) <commit_msg>Remove the blind addition of prefech_related() to all Correlation querysets.<commit_after># -*- coding: utf-8 -*- from datetime import date from django.contrib.contenttypes.models import ContentType from django.db import models class CorrelationManager(models.Manager): def update_or_create(self, instance, timestamp, attribute): # Membership is a special case. Since most groups are static # (or non-generational), the date the group is formed is the same as # the date its members joined. So if those two values are equal, stop # the process. if not timestamp or (instance._meta.model_name == 'membership' and instance.started == instance.group.started): return ctype = ContentType.objects.get_for_model(instance.sender) defaults = { 'timestamp': timestamp, 'julian': timestamp.timetuple().tm_yday, 'year': timestamp.year, 'month': timestamp.month, 'day': timestamp.day, } correlation, created = self.get_or_create( content_type=ctype, object_id=instance._get_pk_val(), identifier=instance._meta.model_name, date_field=attribute, defaults=defaults ) for key, value in defaults.iteritems(): setattr(correlation, key, value) correlation.save() return def get_query_set(self): qs = super(CorrelationManager, self).get_query_set() return qs #.prefetch_related('content_object') def today(self): qs = self.get_query_set() return qs.filter(julian=date.today().timetuple().tm_yday)
d3160598898702d750a3c7fa1910bb8655abcb3f
kay/management/app_template/urls.py
kay/management/app_template/urls.py
# -*- coding: utf-8 -*- # %app_name%.urls from werkzeug.routing import ( Map, Rule, Submount, EndpointPrefix, RuleTemplate, ) def make_rules(): return [ EndpointPrefix('%app_name%/', [ Rule('/', endpoint='index'), ]), ] all_views = { '%app_name%/index': '%app_name%.views.index', }
# -*- coding: utf-8 -*- # %app_name%.urls from kay.view_group import ( ViewGroup, URL ) view_groups = [ ViewGroup(URL('/', endpoint='index', view='%app_name%.views.index')) ]
Use a new interface for urlmapping in application template.
Use a new interface for urlmapping in application template.
Python
bsd-3-clause
IanLewis/kay,IanLewis/kay,IanLewis/kay,IanLewis/kay
# -*- coding: utf-8 -*- # %app_name%.urls from werkzeug.routing import ( Map, Rule, Submount, EndpointPrefix, RuleTemplate, ) def make_rules(): return [ EndpointPrefix('%app_name%/', [ Rule('/', endpoint='index'), ]), ] all_views = { '%app_name%/index': '%app_name%.views.index', } Use a new interface for urlmapping in application template.
# -*- coding: utf-8 -*- # %app_name%.urls from kay.view_group import ( ViewGroup, URL ) view_groups = [ ViewGroup(URL('/', endpoint='index', view='%app_name%.views.index')) ]
<commit_before># -*- coding: utf-8 -*- # %app_name%.urls from werkzeug.routing import ( Map, Rule, Submount, EndpointPrefix, RuleTemplate, ) def make_rules(): return [ EndpointPrefix('%app_name%/', [ Rule('/', endpoint='index'), ]), ] all_views = { '%app_name%/index': '%app_name%.views.index', } <commit_msg>Use a new interface for urlmapping in application template.<commit_after>
# -*- coding: utf-8 -*- # %app_name%.urls from kay.view_group import ( ViewGroup, URL ) view_groups = [ ViewGroup(URL('/', endpoint='index', view='%app_name%.views.index')) ]
# -*- coding: utf-8 -*- # %app_name%.urls from werkzeug.routing import ( Map, Rule, Submount, EndpointPrefix, RuleTemplate, ) def make_rules(): return [ EndpointPrefix('%app_name%/', [ Rule('/', endpoint='index'), ]), ] all_views = { '%app_name%/index': '%app_name%.views.index', } Use a new interface for urlmapping in application template.# -*- coding: utf-8 -*- # %app_name%.urls from kay.view_group import ( ViewGroup, URL ) view_groups = [ ViewGroup(URL('/', endpoint='index', view='%app_name%.views.index')) ]
<commit_before># -*- coding: utf-8 -*- # %app_name%.urls from werkzeug.routing import ( Map, Rule, Submount, EndpointPrefix, RuleTemplate, ) def make_rules(): return [ EndpointPrefix('%app_name%/', [ Rule('/', endpoint='index'), ]), ] all_views = { '%app_name%/index': '%app_name%.views.index', } <commit_msg>Use a new interface for urlmapping in application template.<commit_after># -*- coding: utf-8 -*- # %app_name%.urls from kay.view_group import ( ViewGroup, URL ) view_groups = [ ViewGroup(URL('/', endpoint='index', view='%app_name%.views.index')) ]
173ee66df6a45c979df599422efc3f35bb41a243
neuroshare/EventEntity.py
neuroshare/EventEntity.py
from Entity import * class EventEntity(Entity): EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): lib = self.file.library data = lib._get_event_data (self, index) return data
from Entity import * class EventEntity(Entity): """Event entities represent specific timepoints with associated data, e.g. a trigger events. Data can be binary (8, 16 or 32 bit) values, text or comma separated values (cvs). """ EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): """The type of the event: * binary (8, 16, 32 bit) [``EVENT_BYTE, EVENT_WORD, EVENT_DWORD``] * text [``EVENT_TEXT``] * comma separated values (csv) [``EVENT_CSV``] """ return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): """Retrieve the data at ``index``""" lib = self.file.library data = lib._get_event_data (self, index) return data
Add simple docs to Event entities
doc: Add simple docs to Event entities
Python
lgpl-2.1
abhay447/python-neuroshare,abhay447/python-neuroshare,G-Node/python-neuroshare,G-Node/python-neuroshare
from Entity import * class EventEntity(Entity): EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): lib = self.file.library data = lib._get_event_data (self, index) return data doc: Add simple docs to Event entities
from Entity import * class EventEntity(Entity): """Event entities represent specific timepoints with associated data, e.g. a trigger events. Data can be binary (8, 16 or 32 bit) values, text or comma separated values (cvs). """ EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): """The type of the event: * binary (8, 16, 32 bit) [``EVENT_BYTE, EVENT_WORD, EVENT_DWORD``] * text [``EVENT_TEXT``] * comma separated values (csv) [``EVENT_CSV``] """ return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): """Retrieve the data at ``index``""" lib = self.file.library data = lib._get_event_data (self, index) return data
<commit_before> from Entity import * class EventEntity(Entity): EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): lib = self.file.library data = lib._get_event_data (self, index) return data <commit_msg>doc: Add simple docs to Event entities<commit_after>
from Entity import * class EventEntity(Entity): """Event entities represent specific timepoints with associated data, e.g. a trigger events. Data can be binary (8, 16 or 32 bit) values, text or comma separated values (cvs). """ EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): """The type of the event: * binary (8, 16, 32 bit) [``EVENT_BYTE, EVENT_WORD, EVENT_DWORD``] * text [``EVENT_TEXT``] * comma separated values (csv) [``EVENT_CSV``] """ return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): """Retrieve the data at ``index``""" lib = self.file.library data = lib._get_event_data (self, index) return data
from Entity import * class EventEntity(Entity): EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): lib = self.file.library data = lib._get_event_data (self, index) return data doc: Add simple docs to Event entities from Entity import * class EventEntity(Entity): """Event entities represent specific timepoints with associated data, e.g. a trigger events. Data can be binary (8, 16 or 32 bit) values, text or comma separated values (cvs). """ EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): """The type of the event: * binary (8, 16, 32 bit) [``EVENT_BYTE, EVENT_WORD, EVENT_DWORD``] * text [``EVENT_TEXT``] * comma separated values (csv) [``EVENT_CSV``] """ return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): """Retrieve the data at ``index``""" lib = self.file.library data = lib._get_event_data (self, index) return data
<commit_before> from Entity import * class EventEntity(Entity): EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): lib = self.file.library data = lib._get_event_data (self, index) return data <commit_msg>doc: Add simple docs to Event entities<commit_after> from Entity import * class EventEntity(Entity): """Event entities represent specific timepoints with associated data, e.g. a trigger events. Data can be binary (8, 16 or 32 bit) values, text or comma separated values (cvs). """ EVENT_TEXT = 1 EVENT_CSV = 2 EVENT_BYTE = 3 EVENT_WORD = 4 EVENT_DWORD = 5 def __init__(self, nsfile, eid, info): super(EventEntity,self).__init__(eid, nsfile, info) @property def event_type(self): """The type of the event: * binary (8, 16, 32 bit) [``EVENT_BYTE, EVENT_WORD, EVENT_DWORD``] * text [``EVENT_TEXT``] * comma separated values (csv) [``EVENT_CSV``] """ return self._info['EventType'] @property def csv_desc(self): return self._info['CSVDesc'] @property def max_data_length(self): return self._info['MaxDataLength'] def get_data (self, index): """Retrieve the data at ``index``""" lib = self.file.library data = lib._get_event_data (self, index) return data
63ba7b3f21f21d77ca72eca4503ad6edf3986ed5
src/nodeconductor_openstack/tests/unittests/test_handlers.py
src/nodeconductor_openstack/tests/unittests/test_handlers.py
from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0)
from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0)
Fix floating IP unit test.
Fix floating IP unit test.
Python
mit
opennode/nodeconductor-openstack
from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) Fix floating IP unit test.
from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0)
<commit_before>from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) <commit_msg>Fix floating IP unit test.<commit_after>
from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0)
from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) Fix floating IP unit test.from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0)
<commit_before>from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory(service_project_link=tenant.service_project_link, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) <commit_msg>Fix floating IP unit test.<commit_after>from django.test import TestCase from .. import factories class FloatingIpHandlersTest(TestCase): def test_floating_ip_count_quota_increases_on_floating_ip_creation(self): tenant = factories.TenantFactory() factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='UP') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) def test_floating_ip_count_quota_changes_on_floating_ip_status_change(self): tenant = factories.TenantFactory() floating_ip = factories.FloatingIPFactory( service_project_link=tenant.service_project_link, tenant=tenant, status='DOWN') self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0) floating_ip.status = 'UP' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 1) floating_ip.status = 'DOWN' floating_ip.save() self.assertEqual(tenant.quotas.get(name='floating_ip_count').usage, 0)
a9dd72ef0f82f6cb818cd3d265090ed280385033
tests/test_curry.py
tests/test_curry.py
import pytest from currypy import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3
import pytest from curryer import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3
Change package name in tests
Change package name in tests
Python
bsd-3-clause
sigmavirus24/curryer
import pytest from currypy import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3 Change package name in tests
import pytest from curryer import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3
<commit_before>import pytest from currypy import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3 <commit_msg>Change package name in tests<commit_after>
import pytest from curryer import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3
import pytest from currypy import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3 Change package name in testsimport pytest from curryer import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3
<commit_before>import pytest from currypy import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3 <commit_msg>Change package name in tests<commit_after>import pytest from curryer import curry class TestCurry: def test_curry_as_decorator(self): """Ensure that currypy.curry can be used as a decorator""" @curry def func(): pass assert func.curried is False def test_curry_refuses_None(self): """Ensure that currypy.curry refuses None""" with pytest.raises(TypeError): curry(None) def test_curries_when_given_parameters(self): @curry def add(a, b): return a + b assert add(1).curried is True def test_evaluates_when_given_enough_parameters(self): @curry def add(a, b): return a + b assert add(1)(2) == 3 assert add(1, 2) == 3
19e0deeb65a4e66e5ab623702701d82f1994d594
world_population.py
world_population.py
import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = pop_dict['Value'] print(country_name + ": " + population)
import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = int(float(pop_dict['Value'])) print(country_name + ": " + str(population))
Convert Strings into Numerical Values
Convert Strings into Numerical Values
Python
mit
4bic-attic/data_viz
import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = pop_dict['Value'] print(country_name + ": " + population) Convert Strings into Numerical Values
import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = int(float(pop_dict['Value'])) print(country_name + ": " + str(population))
<commit_before>import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = pop_dict['Value'] print(country_name + ": " + population) <commit_msg>Convert Strings into Numerical Values<commit_after>
import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = int(float(pop_dict['Value'])) print(country_name + ": " + str(population))
import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = pop_dict['Value'] print(country_name + ": " + population) Convert Strings into Numerical Valuesimport json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = int(float(pop_dict['Value'])) print(country_name + ": " + str(population))
<commit_before>import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = pop_dict['Value'] print(country_name + ": " + population) <commit_msg>Convert Strings into Numerical Values<commit_after>import json #load data onto a list filename = 'population_data.json' with open(filename) as f: pop_data = json.load(f) #print the 2010 population for each country for pop_dict in pop_data: if pop_dict['Year'] == '2010': country_name = pop_dict['Country Name'] population = int(float(pop_dict['Value'])) print(country_name + ": " + str(population))
593964161e260b5b34e557d0d90485d457595063
tests/test_utils.py
tests/test_utils.py
"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def setup_module(): global mock_request mock_request = patch.start()().request def teardown_module(): patch.stop() def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...'
"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def mock(): global mock_request mock_request = patch.start()().request def unmock(): patch.stop() @with_setup(mock, unmock) def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' @with_setup(mock, unmock) def test_get_application_access_token_raises_error(): mock_request.return_value.content = 'An unknown error occurred' with assert_raises(GraphAPI.FacebookError): get_application_access_token('<application id>', '<application secret key>')
Add test for failing get_application_access_token
Add test for failing get_application_access_token
Python
mit
merwok-forks/facepy,jwjohns/facepy,jgorset/facepy,liorshahverdi/facepy,Spockuto/facepy,buzzfeed/facepy,jwjohns/facepy
"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def setup_module(): global mock_request mock_request = patch.start()().request def teardown_module(): patch.stop() def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' Add test for failing get_application_access_token
"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def mock(): global mock_request mock_request = patch.start()().request def unmock(): patch.stop() @with_setup(mock, unmock) def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' @with_setup(mock, unmock) def test_get_application_access_token_raises_error(): mock_request.return_value.content = 'An unknown error occurred' with assert_raises(GraphAPI.FacebookError): get_application_access_token('<application id>', '<application secret key>')
<commit_before>"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def setup_module(): global mock_request mock_request = patch.start()().request def teardown_module(): patch.stop() def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' <commit_msg>Add test for failing get_application_access_token<commit_after>
"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def mock(): global mock_request mock_request = patch.start()().request def unmock(): patch.stop() @with_setup(mock, unmock) def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' @with_setup(mock, unmock) def test_get_application_access_token_raises_error(): mock_request.return_value.content = 'An unknown error occurred' with assert_raises(GraphAPI.FacebookError): get_application_access_token('<application id>', '<application secret key>')
"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def setup_module(): global mock_request mock_request = patch.start()().request def teardown_module(): patch.stop() def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' Add test for failing get_application_access_token"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def mock(): global mock_request mock_request = patch.start()().request def unmock(): patch.stop() @with_setup(mock, unmock) def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' @with_setup(mock, unmock) def test_get_application_access_token_raises_error(): mock_request.return_value.content = 'An unknown error occurred' with assert_raises(GraphAPI.FacebookError): get_application_access_token('<application id>', '<application secret key>')
<commit_before>"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def setup_module(): global mock_request mock_request = patch.start()().request def teardown_module(): patch.stop() def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' <commit_msg>Add test for failing get_application_access_token<commit_after>"""Tests for the ``utils`` module.""" from mock import patch, Mock as mock from nose.tools import * from facepy import * patch = patch('requests.session') def mock(): global mock_request mock_request = patch.start()().request def unmock(): patch.stop() @with_setup(mock, unmock) def test_get_application_access_token(): mock_request.return_value.content = 'access_token=...' access_token = get_application_access_token('<application id>', '<application secret key>') mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token', allow_redirects = True, params = { 'client_id': '<application id>', 'client_secret': '<application secret key>', 'grant_type': 'client_credentials' } ) assert access_token == '...' @with_setup(mock, unmock) def test_get_application_access_token_raises_error(): mock_request.return_value.content = 'An unknown error occurred' with assert_raises(GraphAPI.FacebookError): get_application_access_token('<application id>', '<application secret key>')
906d60089dbe6b263ae55d91ba73d6b6e41ebbb5
api/admin.py
api/admin.py
from django.contrib import admin from .models import MaintenanceRecord, UserPreferences @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] # Register your models here. admin.site.register(MaintenanceRecord)
from django.contrib import admin from .models import MaintenanceRecord, UserPreferences, HelpLink @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] @admin.register(HelpLink) class HelpLinkAdmin(admin.ModelAdmin): actions = None # disable the `delete selected` action list_display = ["link_key", "topic", "context", "href"] def get_readonly_fields(self, request, obj=None): if obj: # editing an existing object return self.readonly_fields + ("link_key", ) return self.readonly_fields def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False # Register your models here. admin.site.register(MaintenanceRecord)
Add entire in Admin for managing HelpLink
Add entire in Admin for managing HelpLink An admin can _only_ modify the hyperlink associated with a HelpLink. As a consequence, you cannot add new instances of the model nor delete them. Only the existing HelpLinks can be modified because their inclusion (or existence) is dependent upon the usage within the UI. If one *must* do something to add or delete or override what is allowed via Django Admin, they will _need_ database/SQL level access given this current implementation. See ATMO-1230 & ATMO-1270 for more context.
Python
apache-2.0
CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend
from django.contrib import admin from .models import MaintenanceRecord, UserPreferences @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] # Register your models here. admin.site.register(MaintenanceRecord) Add entire in Admin for managing HelpLink An admin can _only_ modify the hyperlink associated with a HelpLink. As a consequence, you cannot add new instances of the model nor delete them. Only the existing HelpLinks can be modified because their inclusion (or existence) is dependent upon the usage within the UI. If one *must* do something to add or delete or override what is allowed via Django Admin, they will _need_ database/SQL level access given this current implementation. See ATMO-1230 & ATMO-1270 for more context.
from django.contrib import admin from .models import MaintenanceRecord, UserPreferences, HelpLink @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] @admin.register(HelpLink) class HelpLinkAdmin(admin.ModelAdmin): actions = None # disable the `delete selected` action list_display = ["link_key", "topic", "context", "href"] def get_readonly_fields(self, request, obj=None): if obj: # editing an existing object return self.readonly_fields + ("link_key", ) return self.readonly_fields def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False # Register your models here. admin.site.register(MaintenanceRecord)
<commit_before>from django.contrib import admin from .models import MaintenanceRecord, UserPreferences @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] # Register your models here. admin.site.register(MaintenanceRecord) <commit_msg>Add entire in Admin for managing HelpLink An admin can _only_ modify the hyperlink associated with a HelpLink. As a consequence, you cannot add new instances of the model nor delete them. Only the existing HelpLinks can be modified because their inclusion (or existence) is dependent upon the usage within the UI. If one *must* do something to add or delete or override what is allowed via Django Admin, they will _need_ database/SQL level access given this current implementation. See ATMO-1230 & ATMO-1270 for more context.<commit_after>
from django.contrib import admin from .models import MaintenanceRecord, UserPreferences, HelpLink @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] @admin.register(HelpLink) class HelpLinkAdmin(admin.ModelAdmin): actions = None # disable the `delete selected` action list_display = ["link_key", "topic", "context", "href"] def get_readonly_fields(self, request, obj=None): if obj: # editing an existing object return self.readonly_fields + ("link_key", ) return self.readonly_fields def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False # Register your models here. admin.site.register(MaintenanceRecord)
from django.contrib import admin from .models import MaintenanceRecord, UserPreferences @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] # Register your models here. admin.site.register(MaintenanceRecord) Add entire in Admin for managing HelpLink An admin can _only_ modify the hyperlink associated with a HelpLink. As a consequence, you cannot add new instances of the model nor delete them. Only the existing HelpLinks can be modified because their inclusion (or existence) is dependent upon the usage within the UI. If one *must* do something to add or delete or override what is allowed via Django Admin, they will _need_ database/SQL level access given this current implementation. See ATMO-1230 & ATMO-1270 for more context.from django.contrib import admin from .models import MaintenanceRecord, UserPreferences, HelpLink @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] @admin.register(HelpLink) class HelpLinkAdmin(admin.ModelAdmin): actions = None # disable the `delete selected` action list_display = ["link_key", "topic", "context", "href"] def get_readonly_fields(self, request, obj=None): if obj: # editing an existing object return self.readonly_fields + ("link_key", ) return self.readonly_fields def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False # Register your models here. admin.site.register(MaintenanceRecord)
<commit_before>from django.contrib import admin from .models import MaintenanceRecord, UserPreferences @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] # Register your models here. admin.site.register(MaintenanceRecord) <commit_msg>Add entire in Admin for managing HelpLink An admin can _only_ modify the hyperlink associated with a HelpLink. As a consequence, you cannot add new instances of the model nor delete them. Only the existing HelpLinks can be modified because their inclusion (or existence) is dependent upon the usage within the UI. If one *must* do something to add or delete or override what is allowed via Django Admin, they will _need_ database/SQL level access given this current implementation. See ATMO-1230 & ATMO-1270 for more context.<commit_after>from django.contrib import admin from .models import MaintenanceRecord, UserPreferences, HelpLink @admin.register(UserPreferences) class UserPreferencesAdmin(admin.ModelAdmin): list_display = ["user", "show_beta_interface", "airport_ui", "created_date", "modified_date"] list_filter = [ "show_beta_interface", "airport_ui", ] @admin.register(HelpLink) class HelpLinkAdmin(admin.ModelAdmin): actions = None # disable the `delete selected` action list_display = ["link_key", "topic", "context", "href"] def get_readonly_fields(self, request, obj=None): if obj: # editing an existing object return self.readonly_fields + ("link_key", ) return self.readonly_fields def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False # Register your models here. admin.site.register(MaintenanceRecord)
2b3e281c228a4efa9483362f10eac74ce4da6178
parliament/legacy_urls.py
parliament/legacy_urls.py
from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('parliament.hansards.views.index')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ]
from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('debates')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ]
Fix a couple of redirect URLs
Fix a couple of redirect URLs
Python
agpl-3.0
litui/openparliament,litui/openparliament,rhymeswithcycle/openparliament,rhymeswithcycle/openparliament,rhymeswithcycle/openparliament,litui/openparliament
from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('parliament.hansards.views.index')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ] Fix a couple of redirect URLs
from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('debates')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ]
<commit_before>from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('parliament.hansards.views.index')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ] <commit_msg>Fix a couple of redirect URLs<commit_after>
from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('debates')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ]
from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('parliament.hansards.views.index')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ] Fix a couple of redirect URLsfrom django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('debates')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ]
<commit_before>from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('parliament.hansards.views.index')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ] <commit_msg>Fix a couple of redirect URLs<commit_after>from django.conf.urls import url from parliament.core.utils import redir_view from parliament.hansards.redirect_views import hansard_redirect urlpatterns = [ url(r'^hansards/$', redir_view('debates')), url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')), url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect), url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect), ]
4c4b09e1bfbd60bfe1453c5a3b3e8f13d2eaa4ce
comet/tcp/test/test_voeventsubscriber.py
comet/tcp/test/test_voeventsubscriber.py
from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost()
from twisted.internet import task from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() class VOEventSubscriberTimeoutTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.clock = task.Clock() self.proto.callLater = self.clock.callLater self.tr = proto_helpers.StringTransport() self.proto.makeConnection(self.tr) def test_timeout(self): self.clock.advance(self.proto.ALIVE_INTERVAL) self.assertEqual(self.tr.disconnecting, True)
Add test for subscriber timeout
Add test for subscriber timeout
Python
bsd-2-clause
jdswinbank/Comet,jdswinbank/Comet
from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() Add test for subscriber timeout
from twisted.internet import task from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() class VOEventSubscriberTimeoutTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.clock = task.Clock() self.proto.callLater = self.clock.callLater self.tr = proto_helpers.StringTransport() self.proto.makeConnection(self.tr) def test_timeout(self): self.clock.advance(self.proto.ALIVE_INTERVAL) self.assertEqual(self.tr.disconnecting, True)
<commit_before>from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() <commit_msg>Add test for subscriber timeout<commit_after>
from twisted.internet import task from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() class VOEventSubscriberTimeoutTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.clock = task.Clock() self.proto.callLater = self.clock.callLater self.tr = proto_helpers.StringTransport() self.proto.makeConnection(self.tr) def test_timeout(self): self.clock.advance(self.proto.ALIVE_INTERVAL) self.assertEqual(self.tr.disconnecting, True)
from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() Add test for subscriber timeoutfrom twisted.internet import task from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() class VOEventSubscriberTimeoutTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.clock = task.Clock() self.proto.callLater = self.clock.callLater self.tr = proto_helpers.StringTransport() self.proto.makeConnection(self.tr) def test_timeout(self): self.clock.advance(self.proto.ALIVE_INTERVAL) self.assertEqual(self.tr.disconnecting, True)
<commit_before>from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() <commit_msg>Add test for subscriber timeout<commit_after>from twisted.internet import task from twisted.trial import unittest from twisted.test import proto_helpers from ...test.support import DUMMY_EVENT_IVORN as DUMMY_IVORN from ..protocol import VOEventSubscriber from ..protocol import VOEventSubscriberFactory class VOEventSubscriberFactoryTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.proto.makeConnection(proto_helpers.StringTransport()) def test_protocol(self): self.assertIsInstance(self.proto, VOEventSubscriber) def tearDown(self): self.proto.connectionLost() class VOEventSubscriberTimeoutTestCase(unittest.TestCase): def setUp(self): factory = VOEventSubscriberFactory(DUMMY_IVORN) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.clock = task.Clock() self.proto.callLater = self.clock.callLater self.tr = proto_helpers.StringTransport() self.proto.makeConnection(self.tr) def test_timeout(self): self.clock.advance(self.proto.ALIVE_INTERVAL) self.assertEqual(self.tr.disconnecting, True)
6c6d7e3dc2c61b13d17f30ddd7607a4dfb2ef86d
nova/policies/migrate_server.py
nova/policies/migrate_server.py
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate', base.RULE_ADMIN_API, "Cold migrate a server to a host", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ]), policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate_live', base.RULE_ADMIN_API, "Live migrate a server to a new host without a reboot", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ]), ] def list_rules(): return migrate_server_policies
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate', check_str=base.RULE_ADMIN_API, description="Cold migrate a server to a host", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ], scope_types=['system', 'project']), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate_live', check_str=base.RULE_ADMIN_API, description="Live migrate a server to a new host without a reboot", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ], scope_types=['system', 'project']), ] def list_rules(): return migrate_server_policies
Introduce scope_types in migrate server
Introduce scope_types in migrate server oslo.policy introduced the scope_type feature which can control the access level at system-level and project-level. - https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope - http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html Appropriate scope_type for nova case: - https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope This commit introduce scope_type for migrate server API policies as 'system'. Also adds the test case with scope_type enabled and verify we pass and fail the policy check with expected context. Partial implement blueprint policy-defaults-refresh Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be
Python
apache-2.0
klmitch/nova,openstack/nova,openstack/nova,mahak/nova,klmitch/nova,mahak/nova,klmitch/nova,openstack/nova,mahak/nova,klmitch/nova
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate', base.RULE_ADMIN_API, "Cold migrate a server to a host", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ]), policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate_live', base.RULE_ADMIN_API, "Live migrate a server to a new host without a reboot", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ]), ] def list_rules(): return migrate_server_policies Introduce scope_types in migrate server oslo.policy introduced the scope_type feature which can control the access level at system-level and project-level. - https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope - http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html Appropriate scope_type for nova case: - https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope This commit introduce scope_type for migrate server API policies as 'system'. Also adds the test case with scope_type enabled and verify we pass and fail the policy check with expected context. Partial implement blueprint policy-defaults-refresh Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate', check_str=base.RULE_ADMIN_API, description="Cold migrate a server to a host", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ], scope_types=['system', 'project']), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate_live', check_str=base.RULE_ADMIN_API, description="Live migrate a server to a new host without a reboot", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ], scope_types=['system', 'project']), ] def list_rules(): return migrate_server_policies
<commit_before># Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate', base.RULE_ADMIN_API, "Cold migrate a server to a host", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ]), policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate_live', base.RULE_ADMIN_API, "Live migrate a server to a new host without a reboot", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ]), ] def list_rules(): return migrate_server_policies <commit_msg>Introduce scope_types in migrate server oslo.policy introduced the scope_type feature which can control the access level at system-level and project-level. - https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope - http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html Appropriate scope_type for nova case: - https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope This commit introduce scope_type for migrate server API policies as 'system'. Also adds the test case with scope_type enabled and verify we pass and fail the policy check with expected context. Partial implement blueprint policy-defaults-refresh Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be<commit_after>
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate', check_str=base.RULE_ADMIN_API, description="Cold migrate a server to a host", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ], scope_types=['system', 'project']), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate_live', check_str=base.RULE_ADMIN_API, description="Live migrate a server to a new host without a reboot", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ], scope_types=['system', 'project']), ] def list_rules(): return migrate_server_policies
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate', base.RULE_ADMIN_API, "Cold migrate a server to a host", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ]), policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate_live', base.RULE_ADMIN_API, "Live migrate a server to a new host without a reboot", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ]), ] def list_rules(): return migrate_server_policies Introduce scope_types in migrate server oslo.policy introduced the scope_type feature which can control the access level at system-level and project-level. - https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope - http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html Appropriate scope_type for nova case: - https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope This commit introduce scope_type for migrate server API policies as 'system'. Also adds the test case with scope_type enabled and verify we pass and fail the policy check with expected context. Partial implement blueprint policy-defaults-refresh Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate', check_str=base.RULE_ADMIN_API, description="Cold migrate a server to a host", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ], scope_types=['system', 'project']), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate_live', check_str=base.RULE_ADMIN_API, description="Live migrate a server to a new host without a reboot", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ], scope_types=['system', 'project']), ] def list_rules(): return migrate_server_policies
<commit_before># Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate', base.RULE_ADMIN_API, "Cold migrate a server to a host", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ]), policy.DocumentedRuleDefault( POLICY_ROOT % 'migrate_live', base.RULE_ADMIN_API, "Live migrate a server to a new host without a reboot", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ]), ] def list_rules(): return migrate_server_policies <commit_msg>Introduce scope_types in migrate server oslo.policy introduced the scope_type feature which can control the access level at system-level and project-level. - https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope - http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html Appropriate scope_type for nova case: - https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope This commit introduce scope_type for migrate server API policies as 'system'. Also adds the test case with scope_type enabled and verify we pass and fail the policy check with expected context. Partial implement blueprint policy-defaults-refresh Change-Id: Icba4c14f240215fd56f1cdd9814cc81ebf2796be<commit_after># Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-migrate-server:%s' migrate_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate', check_str=base.RULE_ADMIN_API, description="Cold migrate a server to a host", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (migrate)' } ], scope_types=['system', 'project']), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'migrate_live', check_str=base.RULE_ADMIN_API, description="Live migrate a server to a new host without a reboot", operations=[ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-migrateLive)' } ], scope_types=['system', 'project']), ] def list_rules(): return migrate_server_policies
fd96c3160f0a54fab4bd18e9cc2585c0a3420f75
tests/test_models.py
tests/test_models.py
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == 'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
Add unicode encoding to sted.navn
Add unicode encoding to sted.navn
Python
mit
Turbasen/turbasen.py
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == 'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != '' Add unicode encoding to sted.navn
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
<commit_before>import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == 'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != '' <commit_msg>Add unicode encoding to sted.navn<commit_after>
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == 'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != '' Add unicode encoding to sted.navnimport pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
<commit_before>import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == 'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != '' <commit_msg>Add unicode encoding to sted.navn<commit_after>import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
e0510ea02ad1998973a9e0733f2342b06ddcf182
test/python_api/default-constructor/sb_breakpointlocation.py
test/python_api/default-constructor/sb_breakpointlocation.py
""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint)
""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetAddress() obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint)
Add fuzz call for SBBreakpointLocation.GetAddress().
Add fuzz call for SBBreakpointLocation.GetAddress().

git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141443 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb
""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint) Add fuzz call for SBBreakpointLocation.GetAddress(). git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141443 91177308-0d34-0410-b5e6-96231b3b80d8
""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetAddress() obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint)
<commit_before>""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint) <commit_msg>Add fuzz call for SBBreakpointLocation.GetAddress(). git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141443 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetAddress() obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint)
""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint) Add fuzz call for SBBreakpointLocation.GetAddress(). git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141443 91177308-0d34-0410-b5e6-96231b3b80d8""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetAddress() obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint)
<commit_before>""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint) <commit_msg>Add fuzz call for SBBreakpointLocation.GetAddress(). git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141443 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>""" Fuzz tests an object after the default construction to make sure it does not crash lldb. """ import sys import lldb def fuzz_obj(obj): obj.GetAddress() obj.GetLoadAddress() obj.SetEnabled(True) obj.IsEnabled() obj.SetCondition("i >= 10") obj.GetCondition() obj.SetThreadID(0) obj.GetThreadID() obj.SetThreadIndex(0) obj.GetThreadIndex() obj.SetThreadName("worker thread") obj.GetThreadName() obj.SetQueueName("my queue") obj.GetQueueName() obj.IsResolved() obj.GetDescription(lldb.SBStream(), lldb.eDescriptionLevelVerbose) breakpoint = obj.GetBreakpoint() # Do fuzz testing on the breakpoint obj, it should not crash lldb. import sb_breakpoint sb_breakpoint.fuzz_obj(breakpoint)
5a5e4341f60ac70c7f4182ef2f248a3c518ba0fb
timesketch/apps/sketch/migrations/0010_auto_20141110_1129.py
timesketch/apps/sketch/migrations/0010_auto_20141110_1129.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
# -*- coding: utf-8 -*-
# Auto generated by Django migrate
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
Add auto generated note on migration
Add auto generated note on migration
Python
apache-2.0
lockhy/timesketch,armuk/timesketch,lockhy/timesketch,google/timesketch,google/timesketch,armuk/timesketch,armuk/timesketch,google/timesketch,lockhy/timesketch,lockhy/timesketch,google/timesketch,armuk/timesketch
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]

Add auto generated note on migration
# -*- coding: utf-8 -*-
# Auto generated by Django migrate
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]

<commit_msg>Add auto generated note on migration<commit_after>
# -*- coding: utf-8 -*-
# Auto generated by Django migrate
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]

Add auto generated note on migration# -*- coding: utf-8 -*-
# Auto generated by Django migrate
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]

<commit_msg>Add auto generated note on migration<commit_after># -*- coding: utf-8 -*-
# Auto generated by Django migrate
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sketch', '0009_merge'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='sketch',
            name='timelines',
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='sketch',
            field=models.ForeignKey(default=2, to='sketch.Sketch'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='sketchtimeline',
            name='user',
            field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]