Dataset schema (one row per column; min/max string lengths, or class counts for
categorical columns):

    column            type            min    max
    ----------------  --------------  -----  ------
    commit            stringlengths   40     40
    old_file          stringlengths   4      118
    new_file          stringlengths   4      118
    old_contents      stringlengths   0      2.94k
    new_contents      stringlengths   1      4.43k
    subject           stringlengths   15     444
    message           stringlengths   16     3.45k
    lang              stringclasses   1 value
    license           stringclasses   13 values
    repos             stringlengths   5      43.2k
    prompt            stringlengths   17     4.58k
    response          stringlengths   1      4.43k
    prompt_tagged     stringlengths   58     4.62k
    response_tagged   stringlengths   1      4.43k
    text              stringlengths   132    7.29k
    text_tagged       stringlengths   173    7.33k
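
The six derived columns are compositions of the core fields: prompt combines
old_contents with the commit message, response and response_tagged carry the
post-commit new_contents, the *_tagged columns wrap the same fields in
<commit_before>, <commit_msg>, and <commit_after> tokens, and text /
text_tagged concatenate the prompt and response variants. A minimal Python
sketch of that composition follows; the helper names are hypothetical and the
newline separator is an assumption (this flattened dump does not preserve
exact whitespace), while the tag tokens are verbatim from the tagged columns.

    # Hypothetical helpers illustrating how the derived columns are built
    # from the core fields. The "\n" separator is an assumption; the
    # <commit_before>/<commit_msg>/<commit_after> tokens are verbatim.

    def build_prompt(old_contents: str, message: str) -> str:
        # prompt: the pre-commit file followed by the commit message.
        return old_contents + "\n" + message

    def build_prompt_tagged(old_contents: str, message: str) -> str:
        # prompt_tagged: the same fields wrapped in control tokens.
        return ("<commit_before>" + old_contents +
                "<commit_msg>" + message + "<commit_after>")

    def build_text(old_contents: str, message: str, new_contents: str) -> str:
        # text: prompt plus the response (the post-commit file).
        return build_prompt(old_contents, message) + new_contents

    def build_text_tagged(old_contents: str, message: str,
                          new_contents: str) -> str:
        # text_tagged: tagged prompt plus the post-commit file.
        return build_prompt_tagged(old_contents, message) + new_contents

    if __name__ == "__main__":
        # Round-trip one toy record to show the tagged layout.
        print(build_text_tagged("x = 1\n", "Rename x to y", "y = 1\n"))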

commit:    a6fe31d7f687df6934143fd2dda1cd323f3d31fb
old_file:  uvloop/_patch.py
new_file:  uvloop/_patch.py
lang:      Python
license:   apache-2.0
repos:     1st1/uvloop, MagicStack/uvloop
subject:   Fix patched _format_coroutine to support Cython generators
message:   Fix patched _format_coroutine to support Cython generators

old_contents:

    import asyncio
    from asyncio import coroutines

    def _format_coroutine(coro):
        if asyncio.iscoroutine(coro) and not hasattr(coro, 'cr_code'):
            # Most likely a Cython coroutine
            coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)

            if coro.cr_running:
                return '{} running'.format(coro_name)
            else:
                return coro_name

        return _old_format_coroutine(coro)

    _old_format_coroutine = coroutines._format_coroutine
    coroutines._format_coroutine = _format_coroutine

new_contents:

    import asyncio
    from asyncio import coroutines

    def _format_coroutine(coro):
        if asyncio.iscoroutine(coro) \
                and not hasattr(coro, 'cr_code') \
                and not hasattr(coro, 'gi_code'):
            # Most likely a Cython coroutine
            coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)

            running = False
            try:
                running = coro.cr_running
            except AttributeError:
                try:
                    running = coro.gi_running
                except AttributeError:
                    pass

            if running:
                return '{} running'.format(coro_name)
            else:
                return coro_name

        return _old_format_coroutine(coro)

    _old_format_coroutine = coroutines._format_coroutine
    coroutines._format_coroutine = _format_coroutine

commit:    452c3c4258c8dce5bd6f9e6799fe24253a800651
old_file:  setup.py
new_file:  setup.py
lang:      Python
license:   mit
repos:     sghosh1991/distalgo, mayli/DistAlgo
subject:   Change project name (back) to "DistAlgo".
message:   Change project name (back) to "DistAlgo".
           * setup.py: Change project name to "DistAlgo".

old_contents:

    from distutils.core import setup
    import dpy

    setup(name = "DistPy",
          version = dpy.__version__,
          author = "bolin",
          author_email = "bolin@cs.stonybrook.edu",
          packages = ['dpy', 'dpy.compiler'])

new_contents:

    from distutils.core import setup
    import dpy

    setup(name = "DistAlgo",
          version = dpy.__version__,
          author = "bolin",
          author_email = "bolin@cs.stonybrook.edu",
          packages = ['dpy', 'dpy.compiler'])

commit:    b87c237ef7ec77f3f2224f609cac19f12fe0fa2e
old_file:  setup.py
new_file:  setup.py
lang:      Python
license:   mit
repos:     Renelvon/txrudp, OpenBazaar/txrudp, jorik041/txrudp
subject:   Make txrudp the sole distributed package.
message:   Make txrudp the sole distributed package.

old_contents:

    """Setup module for txrudp."""

    import codecs
    from os import path
    import sys

    from setuptools import find_packages, setup

    _HERE = path.abspath(path.dirname(__file__))
    with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f:
        _LONG_DESCRIPTION = f.read()

    setup(
        name='txrudp',
        version='0.1.0',
        description='A Twisted extension implementing RUDP',
        long_description=_LONG_DESCRIPTION,
        url='https://github.com/Renelvon/txrudp',
        author='Nikolaos Korasidis',
        author_email='renelvon@gmail.com',
        license='MIT',
        classifiers=(
            'Development Status :: 3 - Alpha',
            'Framework :: Twisted',
            'Intended Audience :: Developers',
            'License :: OSI Approved :: MIT License',
            'Operating System :: OS Independent',
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: Implementation :: CPython',
            'Topic :: System :: Networking'
        ),
        keywords='rudp twisted reliable',
        packages=find_packages(exclude=('tests',)),
        install_requires=('jsonschema', 'twisted'),
        extras_require={
            ':python_version<="3.4"': ('argparse',),
            'dev': ('coverage', 'mock', 'nose', 'prospector')
        },
        zip_safe=False
    )

new_contents:

    """Setup module for txrudp."""

    import codecs
    from os import path
    import sys

    from setuptools import setup

    _HERE = path.abspath(path.dirname(__file__))
    with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f:
        _LONG_DESCRIPTION = f.read()

    setup(
        name='txrudp',
        version='0.1.0',
        description='A Twisted extension implementing RUDP',
        long_description=_LONG_DESCRIPTION,
        url='https://github.com/Renelvon/txrudp',
        author='Nikolaos Korasidis',
        author_email='renelvon@gmail.com',
        license='MIT',
        classifiers=(
            'Development Status :: 3 - Alpha',
            'Framework :: Twisted',
            'Intended Audience :: Developers',
            'License :: OSI Approved :: MIT License',
            'Operating System :: OS Independent',
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: Implementation :: CPython',
            'Topic :: System :: Networking'
        ),
        keywords='rudp twisted reliable',
        packages=('txrudp'),
        install_requires=('jsonschema', 'twisted'),
        extras_require={
            ':python_version<="3.4"': ('argparse',),
            'dev': ('coverage', 'mock', 'nose', 'prospector')
        },
        zip_safe=False
    )
"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import find_packages, setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=find_packages(exclude=('tests',)), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False ) Make txrudp the sole distributed package.
"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=('txrudp'), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False )
<commit_before>"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import find_packages, setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=find_packages(exclude=('tests',)), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False ) <commit_msg>Make txrudp the sole distributed package.<commit_after>
"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=('txrudp'), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False )
"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import find_packages, setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=find_packages(exclude=('tests',)), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False ) Make txrudp the sole distributed package."""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=('txrudp'), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False )
<commit_before>"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import find_packages, setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=find_packages(exclude=('tests',)), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False ) <commit_msg>Make txrudp the sole distributed package.<commit_after>"""Setup module for txrudp.""" import codecs from os import path import sys from setuptools import setup _HERE = path.abspath(path.dirname(__file__)) with codecs.open(path.join(_HERE, 'README.md'), encoding='utf-8') as f: _LONG_DESCRIPTION = f.read() setup( name='txrudp', version='0.1.0', description='A Twisted extension implementing RUDP', long_description=_LONG_DESCRIPTION, url='https://github.com/Renelvon/txrudp', author='Nikolaos Korasidis', author_email='renelvon@gmail.com', license='MIT', classifiers=( 'Development Status :: 3 - Alpha', 'Framework :: Twisted', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: System :: Networking' ), keywords='rudp twisted reliable', packages=('txrudp'), install_requires=('jsonschema', 'twisted'), extras_require={ ':python_version<="3.4"': ('argparse',), 'dev': ('coverage', 'mock', 'nose', 'prospector') }, zip_safe=False )

commit:    60d2137878b78620444ce533fcf4ee50ba7b8be1
old_file:  stack.py
new_file:  stack.py
lang:      Python
license:   mit
repos:     jwarren116/data-structures-deux
subject:   Fix peek method to return value instead of object
message:   Fix peek method to return value instead of object

old_contents:

    #!/usr/bin/env python
    '''Implementation of a simple stack data structure.

    The stack has push, pop, and peek methods. Items in the stack have
    a value, and next_item attribute. The stack has a top attribute.
    '''

    class Item(object):
        def __init__(self, value, next_item=None):
            self.value = value
            self.next_item = next_item

        def __str__(self):
            return self.value

    class Stack(object):
        def __init__(self, top=None):
            self.top = top

        def push(self, value):
            item = Item(value)
            item.next_item = self.top
            self.top = item

        def pop(self):
            pass

        def peek(self):
            return self.top

new_contents:

    #!/usr/bin/env python
    '''Implementation of a simple stack data structure.

    The stack has push, pop, and peek methods. Items in the stack have
    a value, and next_item attribute. The stack has a top attribute.
    '''

    class Item(object):
        def __init__(self, value, next_item=None):
            self.value = value
            self.next_item = next_item

        def __str__(self):
            return self.value

    class Stack(object):
        def __init__(self, top=None):
            self.top = top

        def push(self, value):
            item = Item(value)
            item.next_item = self.top
            self.top = item

        def pop(self):
            pass

        def peek(self):
            return self.top.value

commit:    7e15a22ed01b4aa68a73a0e8e10d88d9b3785062
old_file:  aiohttp_json_rpc/__init__.py
new_file:  aiohttp_json_rpc/__init__.py
lang:      Python
license:   apache-2.0
repos:     pengutronix/aiohttp-json-rpc
subject:   Add import JsonRpcClientContext in the main module
message:   Add import JsonRpcClientContext in the main module

old_contents:

    from .decorators import raw_response, validate  # NOQA
    from .client import JsonRpcClient  # NOQA
    from .rpc import JsonRpc  # NOQA
    from .exceptions import (  # NOQA
        RpcGenericServerDefinedError,
        RpcInvalidRequestError,
        RpcMethodNotFoundError,
        RpcInvalidParamsError,
        RpcError,
    )

new_contents:

    from .decorators import raw_response, validate  # NOQA
    from .client import JsonRpcClient, JsonRpcClientContext  # NOQA
    from .rpc import JsonRpc  # NOQA
    from .exceptions import (  # NOQA
        RpcGenericServerDefinedError,
        RpcInvalidRequestError,
        RpcMethodNotFoundError,
        RpcInvalidParamsError,
        RpcError,
    )

commit:    a692c339983ae0252577635751b67324985275dc
old_file:  background_hang_reporter_job/tracked.py
new_file:  background_hang_reporter_job/tracked.py
lang:      Python
license:   mit
repos:     squarewave/background-hang-reporter-job
subject:   Fix Activity Stream category title
message:   Fix Activity Stream category title

old_contents:

    class AllHangs(object):
        title = "All Hangs"

        @staticmethod
        def matches_hang(_):
            return True

    class DevtoolsHangs(object):
        title = "Devtools Hangs"

        @staticmethod
        def matches_hang(hang):
            # pylint: disable=unused-variable
            stack, duration, thread, runnable, process, annotations, build_date, platform = hang

            return stack is not None and any(
                isinstance(frame, basestring) and "devtools/" in frame
                for frame, lib in stack)

    class ActivityStreamHangs(object):
        title = "Devtools Hangs"

        @staticmethod
        def matches_hang(hang):
            # pylint: disable=unused-variable
            stack, duration, thread, runnable, process, annotations, build_date, platform = hang

            return stack is not None and any(
                isinstance(frame, basestring) and "activity-stream/" in frame
                for frame, lib in stack)

    def get_tracked_stats():
        return [AllHangs, DevtoolsHangs, ActivityStreamHangs]

new_contents:

    class AllHangs(object):
        title = "All Hangs"

        @staticmethod
        def matches_hang(_):
            return True

    class DevtoolsHangs(object):
        title = "Devtools Hangs"

        @staticmethod
        def matches_hang(hang):
            # pylint: disable=unused-variable
            stack, duration, thread, runnable, process, annotations, build_date, platform = hang

            return stack is not None and any(
                isinstance(frame, basestring) and "devtools/" in frame
                for frame, lib in stack)

    class ActivityStreamHangs(object):
        title = "Activity Stream Hangs"

        @staticmethod
        def matches_hang(hang):
            # pylint: disable=unused-variable
            stack, duration, thread, runnable, process, annotations, build_date, platform = hang

            return stack is not None and any(
                isinstance(frame, basestring) and "activity-stream/" in frame
                for frame, lib in stack)

    def get_tracked_stats():
        return [AllHangs, DevtoolsHangs, ActivityStreamHangs]

commit:    6b3dc6528a8172c4e07dfd63a57d490c8484700d
old_file:  handroll/composers/atom.py
new_file:  handroll/composers/atom.py
lang:      Python
license:   bsd-2-clause
repos:     handroll/handroll
subject:   Fix Travis while the Atom support is incomplete.
message:   Fix Travis while the Atom support is incomplete.

old_contents:

    # Copyright (c) 2014, Matt Layman

    import os
    import sys

    from werkzeug.contrib.atom import AtomFeed

    from handroll import logger
    from handroll.composers import Composer

    class AtomComposer(Composer):
        """Compose an Atom feed from an Atom metadata file (``.atom``).

        The ``AtomComposer`` parses the metadata specified in the source file
        and produces an XML Atom feed. ``AtomComposer`` uses parameters that
        are needed by Werkzeug's ``AtomFeed`` API. Refer to the `Werkzeug
        documentation <http://werkzeug.pocoo.org/docs/contrib/atom/>`_ for
        all the available options.
        """

        def compose(self, template, source_file, out_dir):
            logger.info('Generating Atom XML for {0} ...'.format(source_file))
            # TODO: Determine what the input file will look like (YAML? JSON?).
            try:
                feed = AtomFeed('Dummy Title')
            except ValueError as error:
                logger.error('Invalid feed {0}: {1}'.format(
                    source_file, error.message))
                sys.exit('Incomplete.')

            root, _ = os.path.splitext(os.path.basename(source_file))
            output_file = os.path.join(out_dir, root + '.xml')
            with open(output_file, 'wb') as out:
                out.write(feed.to_string().encode('utf-8'))
                out.write(b'<!-- handrolled for excellence -->\n')

new_contents:

    # Copyright (c) 2014, Matt Layman

    import os
    import sys

    from werkzeug.contrib.atom import AtomFeed

    from handroll import logger
    from handroll.composers import Composer

    class AtomComposer(Composer):
        """Compose an Atom feed from an Atom metadata file (``.atom``).

        The ``AtomComposer`` parses the metadata specified in the source file
        and produces an XML Atom feed. ``AtomComposer`` uses parameters that
        are needed by Werkzeug's ``AtomFeed`` API. Refer to the `Werkzeug
        documentation <http://werkzeug.pocoo.org/docs/contrib/atom/>`_ for
        all the available options.
        """

        def compose(self, template, source_file, out_dir):
            logger.info('Generating Atom XML for {0} ...'.format(source_file))
            # TODO: Determine what the input file will look like (YAML? JSON?).
            try:
                feed = AtomFeed('Dummy Title', id='temporary')
            except ValueError as error:
                logger.error('Invalid feed {0}: {1}'.format(
                    source_file, error.message))
                sys.exit('Incomplete.')

            root, _ = os.path.splitext(os.path.basename(source_file))
            output_file = os.path.join(out_dir, root + '.xml')
            with open(output_file, 'wb') as out:
                out.write(feed.to_string().encode('utf-8'))
                out.write(b'<!-- handrolled for excellence -->\n')

commit:    a23011dbd6500094f1e2632a998395e32739bb45
old_file:  app/drivers/mslookup/proteinquant.py
new_file:  app/drivers/mslookup/proteinquant.py
lang:      Python
license:   mit
repos:     glormph/msstitch
subject:   Correct variable name is singular here
message:   Correct variable name is singular here

old_contents:

    from app.actions.mslookup import proteinquant as lookups
    from app.drivers.mslookup import base

    class ProteinQuantLookupDriver(base.LookupDriver):
        """Creates lookup of protein tables that contain quant data"""
        lookuptype = 'prottable'

        def __init__(self, **kwargs):
            super().__init__(**kwargs)
            self.poolnames = [x.replace('"', '') for x in kwargs.get('setnames')]
            # FIXME need check to see same poolnames correlate with self.fn len
            self.quantcolpattern = kwargs.get('quantcolpattern', None)
            self.psmnrcolpattern = kwargs.get('psmnrcolpattern', None)
            self.precursorquantcolpattern = kwargs.get('precursorquantcolpattern',
                                                       None)
            self.proteincols = kwargs.get('protcol', None) - 1
            self.probcolpattern = kwargs.get('probcolpattern', None)
            self.fdrcolpattern = kwargs.get('fdrcolpattern', None)
            self.pepcolpattern = kwargs.get('pepcolpattern', None)

        def create_lookup(self):
            lookups.create_proteinquant_lookup(
                self.fn, self.lookup, self.poolnames, self.proteincols,
                self.precursorquantcolpattern, self.quantcolpattern,
                self.psmnrcolpattern, self.probcolpattern,
                self.fdrcolpattern, self.pepcolpattern)

new_contents:

    from app.actions.mslookup import proteinquant as lookups
    from app.drivers.mslookup import base

    class ProteinQuantLookupDriver(base.LookupDriver):
        """Creates lookup of protein tables that contain quant data"""
        lookuptype = 'prottable'

        def __init__(self, **kwargs):
            super().__init__(**kwargs)
            self.poolnames = [x.replace('"', '') for x in kwargs.get('setnames')]
            # FIXME need check to see same poolnames correlate with self.fn len
            self.quantcolpattern = kwargs.get('quantcolpattern', None)
            self.psmnrcolpattern = kwargs.get('psmnrcolpattern', None)
            self.precursorquantcolpattern = kwargs.get('precursorquantcolpattern',
                                                       None)
            self.proteincol = kwargs.get('protcol', None) - 1
            self.probcolpattern = kwargs.get('probcolpattern', None)
            self.fdrcolpattern = kwargs.get('fdrcolpattern', None)
            self.pepcolpattern = kwargs.get('pepcolpattern', None)

        def create_lookup(self):
            lookups.create_proteinquant_lookup(
                self.fn, self.lookup, self.poolnames, self.proteincol,
                self.precursorquantcolpattern, self.quantcolpattern,
                self.psmnrcolpattern, self.probcolpattern,
                self.fdrcolpattern, self.pepcolpattern)
56b5b4d49973702bcb95bb36dcd1e35f40b57a1d
hyper/http20/exceptions.py
hyper/http20/exceptions.py
# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass
# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass class StreamResetError(HTTP20Error): """ A stream was forcefully reset by the remote party. """ pass
Add exception for streams forcefully reset
Add exception for streams forcefully reset
Python
mit
Lukasa/hyper,plucury/hyper,fredthomsen/hyper,irvind/hyper,fredthomsen/hyper,lawnmowerlatte/hyper,lawnmowerlatte/hyper,Lukasa/hyper,plucury/hyper,irvind/hyper
# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass Add exception for streams forcefully reset
# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass class StreamResetError(HTTP20Error): """ A stream was forcefully reset by the remote party. """ pass
<commit_before># -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass <commit_msg>Add exception for streams forcefully reset<commit_after>
# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass class StreamResetError(HTTP20Error): """ A stream was forcefully reset by the remote party. """ pass
# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass Add exception for streams forcefully reset# -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass class StreamResetError(HTTP20Error): """ A stream was forcefully reset by the remote party. """ pass
<commit_before># -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass <commit_msg>Add exception for streams forcefully reset<commit_after># -*- coding: utf-8 -*- """ hyper/http20/exceptions ~~~~~~~~~~~~~~~~~~~~~~~ This defines exceptions used in the HTTP/2 portion of hyper. """ class HTTP20Error(Exception): """ The base class for all of ``hyper``'s HTTP/2-related exceptions. """ pass class HPACKEncodingError(HTTP20Error): """ An error has been encountered while performing HPACK encoding. """ pass class HPACKDecodingError(HTTP20Error): """ An error has been encountered while performing HPACK decoding. """ pass class ConnectionError(HTTP20Error): """ The remote party signalled an error affecting the entire HTTP/2 connection, and the connection has been closed. """ pass class ProtocolError(HTTP20Error): """ The remote party violated the HTTP/2 protocol. """ pass class StreamResetError(HTTP20Error): """ A stream was forcefully reset by the remote party. """ pass
91b281a2d8e0938404fa533c7a4ff9f2a251d1b1
backend/populate_targets.py
backend/populate_targets.py
import django import os import yaml from backend.settings import BASE_DIR os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.values() for target in targets: create_target(target)
import django import os import yaml from backend.settings import BASE_DIR from django.db import IntegrityError os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( name=target['name'], endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tname: {} \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.name, t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.items() for t in targets: target = t[1] target['name'] = t[0] try: create_target(target) except (IntegrityError, ValueError), err: if isinstance(err, IntegrityError): print '[!] Target "{}" already exists.'.format(target['name']) elif isinstance(err, ValueError): print '[!] Invalid parameters for target "{}".'.format(target['name'])
Add name when creating Target
Add name when creating Target
Python
mit
dimkarakostas/rupture,dionyziz/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture,esarafianou/rupture,dimriou/rupture,dionyziz/rupture,dionyziz/rupture,esarafianou/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,esarafianou/rupture
import django import os import yaml from backend.settings import BASE_DIR os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.values() for target in targets: create_target(target) Add name when creating Target
import django import os import yaml from backend.settings import BASE_DIR from django.db import IntegrityError os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( name=target['name'], endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tname: {} \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.name, t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.items() for t in targets: target = t[1] target['name'] = t[0] try: create_target(target) except (IntegrityError, ValueError), err: if isinstance(err, IntegrityError): print '[!] Target "{}" already exists.'.format(target['name']) elif isinstance(err, ValueError): print '[!] Invalid parameters for target "{}".'.format(target['name'])
<commit_before>import django import os import yaml from backend.settings import BASE_DIR os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.values() for target in targets: create_target(target) <commit_msg>Add name when creating Target<commit_after>
import django import os import yaml from backend.settings import BASE_DIR from django.db import IntegrityError os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( name=target['name'], endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tname: {} \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.name, t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.items() for t in targets: target = t[1] target['name'] = t[0] try: create_target(target) except (IntegrityError, ValueError), err: if isinstance(err, IntegrityError): print '[!] Target "{}" already exists.'.format(target['name']) elif isinstance(err, ValueError): print '[!] Invalid parameters for target "{}".'.format(target['name'])
import django import os import yaml from backend.settings import BASE_DIR os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.values() for target in targets: create_target(target) Add name when creating Targetimport django import os import yaml from backend.settings import BASE_DIR from django.db import IntegrityError os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( name=target['name'], endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tname: {} \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.name, t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.items() for t in targets: target = t[1] target['name'] = t[0] try: create_target(target) except (IntegrityError, ValueError), err: if isinstance(err, IntegrityError): print '[!] Target "{}" already exists.'.format(target['name']) elif isinstance(err, ValueError): print '[!] Invalid parameters for target "{}".'.format(target['name'])
<commit_before>import django import os import yaml from backend.settings import BASE_DIR os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.values() for target in targets: create_target(target) <commit_msg>Add name when creating Target<commit_after>import django import os import yaml from backend.settings import BASE_DIR from django.db import IntegrityError os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') django.setup() from breach.models import Target def create_target(target): t = Target( name=target['name'], endpoint=target['endpoint'], prefix=target['prefix'], alphabet=target['alphabet'], secretlength=target['secretlength'], alignmentalphabet=target['alignmentalphabet'], recordscardinality=target['recordscardinality'] ) t.save() print '''Created Target: \tname: {} \tendpoint: {} \tprefix: {} \talphabet: {} \tsecretlength: {} \talignmentalphabet: {} \trecordscardinality'''.format( t.name, t.endpoint, t.prefix, t.alphabet, t.secretlength, t.alignmentalphabet, t.recordscardinality ) if __name__ == '__main__': try: with open(os.path.join(BASE_DIR, 'target_config.yml'), 'r') as ymlconf: cfg = yaml.load(ymlconf) except IOError, err: print 'IOError: %s' % err exit(1) targets = cfg.items() for t in targets: target = t[1] target['name'] = t[0] try: create_target(target) except (IntegrityError, ValueError), err: if isinstance(err, IntegrityError): print '[!] Target "{}" already exists.'.format(target['name']) elif isinstance(err, ValueError): print '[!] Invalid parameters for target "{}".'.format(target['name'])
a055f97a342f670171f30095cabfd4ba1bfdad17
images/singleuser/user-config.py
images/singleuser/user-config.py
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate[fam]['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
Fix dictionary access pattern for setting auth tokens
Fix dictionary access pattern for setting auth tokens
Python
mit
yuvipanda/paws,yuvipanda/paws
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate[fam]['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam Fix dictionary access pattern for setting auth tokens
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
<commit_before>import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate[fam]['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam <commit_msg>Fix dictionary access pattern for setting auth tokens<commit_after>
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate[fam]['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam Fix dictionary access pattern for setting auth tokensimport os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
<commit_before>import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate[fam]['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam <commit_msg>Fix dictionary access pattern for setting auth tokens<commit_after>import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam
d05a16474c9eabc7f52d8d9c6811e4d01bea6080
bongo/settings/travis.py
bongo/settings/travis.py
from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'
from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture', '--verbosity=2'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'
Make test output more verbose so we can figure out what is hanging
Make test output more verbose so we can figure out what is hanging
Python
mit
BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo
from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'Make test output more verbose so we can figure out what is hanging
from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture', '--verbosity=2'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'
<commit_before>from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'<commit_msg>Make test output more verbose so we can figure out what is hanging<commit_after>
from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture', '--verbosity=2'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'
from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'Make test output more verbose so we can figure out what is hangingfrom bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture', '--verbosity=2'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'
<commit_before>from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'<commit_msg>Make test output more verbose so we can figure out what is hanging<commit_after>from bongo.settings.prod import * # The same settings as production, but no database password. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'bongo_test', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '127.0.0.1', 'PORT': '5432', }, } INSTALLED_APPS += ( 'django_nose', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--with-fixture-bundling', '--nologcapture', '--verbosity=2'] NOSE_TESTMATCH = '(?:^|[b_./-])[Tt]ests'
13ffe365680b4dcfb99ce446cc4dffe1587755ff
ipython_notebook_config.py
ipython_notebook_config.py
# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.webapp_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] }
# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.tornado_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] }
Use tornado settings, webapp deprecated
Use tornado settings, webapp deprecated
Python
bsd-3-clause
jupyter/nature-demo,jupyter/nature-demo,jupyter/nature-demo
# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.webapp_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] } Use tornado settings, webapp deprecated
# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.tornado_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] }
<commit_before># Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.webapp_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] } <commit_msg>Use tornado settings, webapp deprecated<commit_after>
# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.tornado_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] }
# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.webapp_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] } Use tornado settings, webapp deprecated# Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.tornado_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] }
<commit_before># Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.webapp_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] } <commit_msg>Use tornado settings, webapp deprecated<commit_after># Configuration file for ipython-notebook. c = get_config() c.NotebookApp.ip = '*' c.NotebookApp.open_browser = False c.NotebookApp.port = 8888 # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL c.NotebookApp.trust_xheaders = True # Supply overrides for the tornado.web.Application that the IPython notebook # uses. c.NotebookApp.tornado_settings = { 'headers': { 'X-Frame-Options': 'ALLOW FROM nature.com' }, 'template_path':['/srv/ga/', '/srv/ipython/IPython/html', '/srv/ipython/IPython/html/templates'] }
8f4c8760dd5f6f21b1c59579332a3c81fa58ed13
buildlet/runner/__init__.py
buildlet/runner/__init__.py
""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): return list(_namemodmap) def run(classname, task, *args, **kwds): runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner
""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): """ Get a runner class named `classname`. """ import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): """ Get a list of runner class names (a list of strings). """ return list(_namemodmap) def run(classname, task, *args, **kwds): """ Run `task` using runner named `classname`. Rest of the arguments are passed to the runner class. Return the instance of the used runner class. """ runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner
Document utility functions in buildlet.runner
Document utility functions in buildlet.runner
Python
bsd-3-clause
tkf/buildlet
""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): return list(_namemodmap) def run(classname, task, *args, **kwds): runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner Document utility functions in buildlet.runner
""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): """ Get a runner class named `classname`. """ import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): """ Get a list of runner class names (a list of strings). """ return list(_namemodmap) def run(classname, task, *args, **kwds): """ Run `task` using runner named `classname`. Rest of the arguments are passed to the runner class. Return the instance of the used runner class. """ runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner
<commit_before>""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): return list(_namemodmap) def run(classname, task, *args, **kwds): runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner <commit_msg>Document utility functions in buildlet.runner<commit_after>
""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): """ Get a runner class named `classname`. """ import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): """ Get a list of runner class names (a list of strings). """ return list(_namemodmap) def run(classname, task, *args, **kwds): """ Run `task` using runner named `classname`. Rest of the arguments are passed to the runner class. Return the instance of the used runner class. """ runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner
""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): return list(_namemodmap) def run(classname, task, *args, **kwds): runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner Document utility functions in buildlet.runner""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): """ Get a runner class named `classname`. """ import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): """ Get a list of runner class names (a list of strings). """ return list(_namemodmap) def run(classname, task, *args, **kwds): """ Run `task` using runner named `classname`. Rest of the arguments are passed to the runner class. Return the instance of the used runner class. """ runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner
<commit_before>""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): return list(_namemodmap) def run(classname, task, *args, **kwds): runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner <commit_msg>Document utility functions in buildlet.runner<commit_after>""" Runner classes to execute tasks. """ _namemodmap = dict( SimpleRunner='simple', IPythonParallelRunner='ipythonparallel', MultiprocessingRunner='multiprocessingpool', ) def getrunner(classname): """ Get a runner class named `classname`. """ import sys module = 'buildlet.runner.{0}'.format(_namemodmap[classname]) __import__(module) return getattr(sys.modules[module], classname) def listrunner(): """ Get a list of runner class names (a list of strings). """ return list(_namemodmap) def run(classname, task, *args, **kwds): """ Run `task` using runner named `classname`. Rest of the arguments are passed to the runner class. Return the instance of the used runner class. """ runner = getrunner(classname)(*args, **kwds) runner.run(task) return runner
62503058850008d7b346d6e6b70943f5e2a1efba
app/taskqueue/celeryconfig.py
app/taskqueue/celeryconfig.py
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1"
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TASK_RESULT_EXPIRES = 3600 CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1"
Set celery task results to expire in 1h
Set celery task results to expire in 1h
Python
lgpl-2.1
kernelci/kernelci-backend,kernelci/kernelci-backend
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1" Set celery task results to expire in 1h
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TASK_RESULT_EXPIRES = 3600 CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1"
<commit_before># This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1" <commit_msg>Set celery task results to expire in 1h<commit_after>
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TASK_RESULT_EXPIRES = 3600 CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1"
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1" Set celery task results to expire in 1h# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TASK_RESULT_EXPIRES = 3600 CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1"
<commit_before># This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1" <commit_msg>Set celery task results to expire in 1h<commit_after># This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Celery configuration values.""" BROKER_URL = "redis://localhost" BROKER_POOL_LIMIT = 20 BROKER_TRANSPORT_OPTIONS = { "visibility_timeout": 60*60*6, "fanout_prefix": True, "fanout_patterns": True } # Use custom json encoder. CELERY_ACCEPT_CONTENT = ["kjson"] CELERY_RESULT_SERIALIZER = "kjson" CELERY_TASK_SERIALIZER = "kjson" CELERY_TASK_RESULT_EXPIRES = 3600 CELERY_TIMEZONE = "UTC" CELERY_ENABLE_UTC = True CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True # Use a different DB than the redis default one. CELERY_RESULT_BACKEND = "redis://localhost/1"
bd971fa5e58db992895df4cb421e10f0e74b70bd
swh/web/browse/urls.py
swh/web/browse/urls.py
# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'person.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns()
# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'browse.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns()
Use correct Django HTML template for default view
browse: Use correct Django HTML template for default view
Python
agpl-3.0
SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui
# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'person.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns() browse: Use correct Django HTML template for default view
# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'browse.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns()
<commit_before># Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'person.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns() <commit_msg>browse: Use correct Django HTML template for default view<commit_after>
# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'browse.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns()
# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'person.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns() browse: Use correct Django HTML template for default view# Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'browse.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns()
<commit_before># Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'person.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns() <commit_msg>browse: Use correct Django HTML template for default view<commit_after># Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url from django.shortcuts import render import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.identifiers # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.person # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls def default_browse_view(request): """Default django view used as an entry point for the swh browse ui web application. The url that points to it is /browse/. Args: request: input django http request """ return render(request, 'browse.html', {'heading': 'Browse the Software Heritage archive', 'empty_browse': True}) urlpatterns = [ url(r'^$', default_browse_view, name='browse-homepage') ] urlpatterns += BrowseUrls.get_url_patterns()
3272067020ed0f2204716ee77d69b475cf0782a0
numba2/tests/test_classes.py
numba2/tests/test_classes.py
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main()
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, sjit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) @jit def return_obj(x): return C(x) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) def test_return_obj(self): # TODO: Heap types: allocation, returning # TODO: stack allocated types: return by value, pass by pointer, # validate immutability in typechecker obj = return_obj(10) self.assertIsInstance(obj, C) self.assertEqual(obj.x, 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main()
Add (failing) test for returning numba objects
Add (failing) test for returning numba objects
Python
bsd-2-clause
flypy/flypy,flypy/flypy
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main() Add (failing) test for returning numba objects
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, sjit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) @jit def return_obj(x): return C(x) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) def test_return_obj(self): # TODO: Heap types: allocation, returning # TODO: stack allocated types: return by value, pass by pointer, # validate immutability in typechecker obj = return_obj(10) self.assertIsInstance(obj, C) self.assertEqual(obj.x, 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main()
<commit_before># -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main() <commit_msg>Add (failing) test for returning numba objects<commit_after>
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, sjit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) @jit def return_obj(x): return C(x) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) def test_return_obj(self): # TODO: Heap types: allocation, returning # TODO: stack allocated types: return by value, pass by pointer, # validate immutability in typechecker obj = return_obj(10) self.assertIsInstance(obj, C) self.assertEqual(obj.x, 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main()
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main() Add (failing) test for returning numba objects# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, sjit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) @jit def return_obj(x): return C(x) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) def test_return_obj(self): # TODO: Heap types: allocation, returning # TODO: stack allocated types: return by value, pass by pointer, # validate immutability in typechecker obj = return_obj(10) self.assertIsInstance(obj, C) self.assertEqual(obj.x, 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main()
<commit_before># -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main() <commit_msg>Add (failing) test for returning numba objects<commit_after># -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import import unittest from numba2 import jit, sjit, int32 #===------------------------------------------------------------------=== # Test code #===------------------------------------------------------------------=== @jit class C(object): layout = {'x': int32} @jit def __init__(self, x): self.x = x @jit def __add__(self, other): return self.x * other.x @jit def method(self, other): return self.x * other.x @jit def call_special(x): return C(x) + C(2) @jit def call_method(x): return C(x).method(C(2)) @jit def return_obj(x): return C(x) #===------------------------------------------------------------------=== # Tests #===------------------------------------------------------------------=== class TestClasses(unittest.TestCase): def test_special_method(self): self.assertEqual(call_special(5), 10) def test_methods(self): self.assertEqual(call_method(5), 10) def test_return_obj(self): # TODO: Heap types: allocation, returning # TODO: stack allocated types: return by value, pass by pointer, # validate immutability in typechecker obj = return_obj(10) self.assertIsInstance(obj, C) self.assertEqual(obj.x, 10) if __name__ == '__main__': #TestClasses('test_special_method').debug() unittest.main()
c3380306512e194543893442ef9327935e789437
tests/integration/blueprints/site/user_message/test_address_formatting.py
tests/integration/blueprints/site/user_message/test_address_formatting.py
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@users.test', 'Alice <alice@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alicia', 'alicia@users.test', 'Alicia <alicia@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param
Rename test user to avoid clash
Rename test user to avoid clash
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@users.test', 'Alice <alice@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param Rename test user to avoid clash
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alicia', 'alicia@users.test', 'Alicia <alicia@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param
<commit_before>""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@users.test', 'Alice <alice@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param <commit_msg>Rename test user to avoid clash<commit_after>
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alicia', 'alicia@users.test', 'Alicia <alicia@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@users.test', 'Alice <alice@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param Rename test user to avoid clash""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alicia', 'alicia@users.test', 'Alicia <alicia@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param
<commit_before>""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@users.test', 'Alice <alice@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param <commit_msg>Rename test user to avoid clash<commit_after>""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.services.user_message import service as user_message_service def test_recipient_formatting(make_user, site, params): screen_name, email_address, expected = params user = make_user(screen_name, email_address=email_address) message = user_message_service.create_message( user.id, user.id, '', '', site.id ) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alicia', 'alicia@users.test', 'Alicia <alicia@users.test>'), ('<AngleInvestor>', 'angleinvestor@users.test', '"<AngleInvestor>" <angleinvestor@users.test>'), ('-=]YOLO[=-', 'yolo@users.test', '"-=]YOLO[=-" <yolo@users.test>'), ]) def params(request): yield request.param
97105453db680c2d99becd10f91604339c970591
linkatos.py
linkatos.py
#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # Main if __name__ == '__main__': # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?")
#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # Main if __name__ == '__main__': # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?")
Move initialisations inside main to avoid running them if not necessary
refactor: Move initialisations inside main to avoid running them if not necessary
Python
mit
iwi/linkatos,iwi/linkatos
#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # Main if __name__ == '__main__': # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?") refactor: Move initialisations inside main to avoid running them if not necessary
#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # Main if __name__ == '__main__': # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?")
<commit_before>#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # Main if __name__ == '__main__': # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?") <commit_msg>refactor: Move initialisations inside main to avoid running them if not necessary<commit_after>
#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # Main if __name__ == '__main__': # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?")
#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # Main if __name__ == '__main__': # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?") refactor: Move initialisations inside main to avoid running them if not necessary#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # Main if __name__ == '__main__': # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?")
<commit_before>#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # Main if __name__ == '__main__': # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?") <commit_msg>refactor: Move initialisations inside main to avoid running them if not necessary<commit_after>#! /usr/bin/env python import os from slackclient import SlackClient import pyrebase import linkatos.firebase as fb import linkatos.activities as activities # Main if __name__ == '__main__': # starterbot environment variables BOT_ID = os.environ.get("BOT_ID") SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN") # instantiate Slack clients slack_client = SlackClient(SLACK_BOT_TOKEN) # firebase environment variables FB_API_KEY = os.environ.get("FB_API_KEY") FB_USER = os.environ.get("FB_USER") FB_PASS = os.environ.get("FB_PASS") fb_credentials = {'username': FB_USER, 'password': FB_PASS} # initialise firebase project_name = 'coses-acbe6' firebase = fb.initialise(FB_API_KEY, project_name) # verify linkatos connection if slack_client.rtm_connect(): parsed_url_message = {} expecting_url = True expecting_reaction = False while True: # note that url is returned to keep it over several cycles # whilst we wait for an answer (expecting_url, expecting_reaction, parsed_url_message) = \ activities.event_consumer( expecting_url, expecting_reaction, parsed_url_message, slack_client, fb_credentials, firebase) else: print("Connection failed. Invalid Slack token or bot ID?")
46074e64289995aab5e1129f1eead705a53010b9
learning_journal/models.py
learning_journal/models.py
from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from sqlalchemy.ext.declarative import declarative_base import datetime import psycopg2 from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow)
import datetime import psycopg2 from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from pyramid.security import Allow, Everyone from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) @property def __acl__(self): """Add permissions for specific instance of Entry object. self.author.username is the user who created this Entry instance.""" return [ (Allow, Everyone, 'view'), (Allow, self.author.username, 'edit') ]
Add acl method to Entry model
Add acl method to Entry model
Python
mit
DZwell/learning_journal,DZwell/learning_journal,DZwell/learning_journal
from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from sqlalchemy.ext.declarative import declarative_base import datetime import psycopg2 from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) Add acl method to Entry model
import datetime import psycopg2 from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from pyramid.security import Allow, Everyone from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) @property def __acl__(self): """Add permissions for specific instance of Entry object. self.author.username is the user who created this Entry instance.""" return [ (Allow, Everyone, 'view'), (Allow, self.author.username, 'edit') ]
<commit_before>from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from sqlalchemy.ext.declarative import declarative_base import datetime import psycopg2 from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) <commit_msg>Add acl method to Entry model<commit_after>
import datetime import psycopg2 from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from pyramid.security import Allow, Everyone from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) @property def __acl__(self): """Add permissions for specific instance of Entry object. self.author.username is the user who created this Entry instance.""" return [ (Allow, Everyone, 'view'), (Allow, self.author.username, 'edit') ]
from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from sqlalchemy.ext.declarative import declarative_base import datetime import psycopg2 from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) Add acl method to Entry modelimport datetime import psycopg2 from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from pyramid.security import Allow, Everyone from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) @property def __acl__(self): """Add permissions for specific instance of Entry object. self.author.username is the user who created this Entry instance.""" return [ (Allow, Everyone, 'view'), (Allow, self.author.username, 'edit') ]
<commit_before>from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from sqlalchemy.ext.declarative import declarative_base import datetime import psycopg2 from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) <commit_msg>Add acl method to Entry model<commit_after>import datetime import psycopg2 from sqlalchemy import ( Column, DateTime, Integer, Unicode, UnicodeText, ) from pyramid.security import Allow, Everyone from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import ( scoped_session, sessionmaker, ) from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() class Entry(Base): """Our Journal Entry class.""" __tablename__ = 'entries' id = Column(Integer, primary_key=True) title = Column(Unicode(128), unique=True) text = Column(UnicodeText) created = Column(DateTime, default=datetime.datetime.utcnow) @property def __acl__(self): """Add permissions for specific instance of Entry object. self.author.username is the user who created this Entry instance.""" return [ (Allow, Everyone, 'view'), (Allow, self.author.username, 'edit') ]
c797a3db62b2af1c236527ef95d713b6b0285345
iati/core/tests/test_resources.py
iati/core/tests/test_resources.py
import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 100 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 100 < len(content)
import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 3200 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 130000 < len(content)
Increase resource size check limits
Increase resource size check limits This brings the limits to the greatest value with 2sf that is below the actual value without becoming higher.
Python
mit
IATI/iati.core,IATI/iati.core
import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 100 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 100 < len(content) Increase resource size check limits This brings the limits to the greatest value with 2sf that is below the actual value without becoming higher.
import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 3200 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 130000 < len(content)
<commit_before>import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 100 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 100 < len(content) <commit_msg>Increase resource size check limits This brings the limits to the greatest value with 2sf that is below the actual value without becoming higher.<commit_after>
import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 3200 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 130000 < len(content)
import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 100 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 100 < len(content) Increase resource size check limits This brings the limits to the greatest value with 2sf that is below the actual value without becoming higher.import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 3200 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 130000 < len(content)
<commit_before>import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 100 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 100 < len(content) <commit_msg>Increase resource size check limits This brings the limits to the greatest value with 2sf that is below the actual value without becoming higher.<commit_after>import pytest import iati.core.resources class TestResources(object): """A container for tests relating to resources""" def test_codelist_flow_type(self): """Check that the FlowType codelist contains content""" path = iati.core.resources.path_codelist('FlowType') content = iati.core.resources.load_as_string(path) assert 3200 < len(content) def test_schema_activity(self): """Check that the Activity schema contains content""" path = iati.core.resources.path_schema('iati-activities-schema') content = iati.core.resources.load_as_string(path) assert 130000 < len(content)
28c47a5d810490c234b85efb0b0d3b200b716b4e
config.py
config.py
import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 100 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, }
import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 30 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, }
Reduce results per page to 30
Reduce results per page to 30
Python
mit
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 100 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, } Reduce results per page to 30
import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 30 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, }
<commit_before>import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 100 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, } <commit_msg>Reduce results per page to 30<commit_after>
import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 30 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, }
import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 100 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, } Reduce results per page to 30import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 30 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, }
<commit_before>import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 100 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, } <commit_msg>Reduce results per page to 30<commit_after>import os from dmutils.status import get_version_label basedir = os.path.abspath(os.path.dirname(__file__)) class Config: VERSION = get_version_label( os.path.abspath(os.path.dirname(__file__)) ) AUTH_REQUIRED = True ELASTICSEARCH_HOST = 'localhost:9200' DM_SEARCH_API_AUTH_TOKENS = None DM_SEARCH_PAGE_SIZE = 30 DM_ID_ONLY_SEARCH_PAGE_SIZE_MULTIPLIER = 10 # Logging DM_LOG_LEVEL = 'DEBUG' DM_APP_NAME = 'search-api' DM_PLAIN_TEXT_LOGS = False DM_LOG_PATH = None VCAP_SERVICES = None DM_ELASTICSEARCH_SERVICE_NAME = "search_api_elasticsearch" @staticmethod def init_app(app): pass class Test(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_LOG_LEVEL = 'CRITICAL' DM_SEARCH_API_AUTH_TOKENS = 'valid-token' class Development(Config): DEBUG = True DM_PLAIN_TEXT_LOGS = True DM_SEARCH_PAGE_SIZE = 5 DM_SEARCH_API_AUTH_TOKENS = 'myToken' class Live(Config): DEBUG = False DM_LOG_PATH = '/var/log/digitalmarketplace/application.log' config = { 'development': Development, 'preview': Live, 'staging': Live, 'production': Live, 'test': Test, }
c59c762be198b9c976d25b093860f1d14e9d2271
backend/scripts/ddirdenorm.py
backend/scripts/ddirdenorm.py
#!/usr/bin/env python import rethinkdb as r conn = r.connect('localhost', 30815, db='materialscommons') if __name__ == "__main__": selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
Add options for setting the port.
Add options for setting the port.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
#!/usr/bin/env python import rethinkdb as r conn = r.connect('localhost', 30815, db='materialscommons') if __name__ == "__main__": selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn) Add options for setting the port.
#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
<commit_before>#!/usr/bin/env python import rethinkdb as r conn = r.connect('localhost', 30815, db='materialscommons') if __name__ == "__main__": selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn) <commit_msg>Add options for setting the port.<commit_after>
#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
#!/usr/bin/env python import rethinkdb as r conn = r.connect('localhost', 30815, db='materialscommons') if __name__ == "__main__": selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn) Add options for setting the port.#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
<commit_before>#!/usr/bin/env python import rethinkdb as r conn = r.connect('localhost', 30815, db='materialscommons') if __name__ == "__main__": selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn) <commit_msg>Add options for setting the port.<commit_after>#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
ca908f2e1244af91ecb1c79bb01ec463f6872835
lib/ansible/playbook/attribute.py
lib/ansible/playbook/attribute.py
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass
Fix indentation to be a multiple of 4
Fix indentation to be a multiple of 4
Python
mit
thaim/ansible,thaim/ansible
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass Fix indentation to be a multiple of 4
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass
<commit_before># (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass <commit_msg>Fix indentation to be a multiple of 4<commit_after>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass Fix indentation to be a multiple of 4# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass
<commit_before># (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass <commit_msg>Fix indentation to be a multiple of 4<commit_after># (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type class Attribute: def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, always_post_validate=False): self.isa = isa self.private = private self.default = default self.required = required self.listof = listof self.priority = priority self.always_post_validate = always_post_validate def __cmp__(self, other): return cmp(other.priority, self.priority) class FieldAttribute(Attribute): pass
cc6bb949b0f4a3c4b6344b219f8b5bae2081e0a4
slave/skia_slave_scripts/download_skimage_files.py
slave/skia_slave_scripts/download_skimage_files.py
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import os import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = os.path.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles))
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import posixpath import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = posixpath.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles))
Use posixpath for paths in the cloud.
Use posixpath for paths in the cloud. Fixes build break on Windows. R=borenet@google.com Review URL: https://codereview.chromium.org/18074002 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9792 2bbb7eff-a529-9590-31e7-b0007b416f81
Python
bsd-3-clause
google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import os import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = os.path.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles)) Use posixpath for paths in the cloud. Fixes build break on Windows. R=borenet@google.com Review URL: https://codereview.chromium.org/18074002 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9792 2bbb7eff-a529-9590-31e7-b0007b416f81
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import posixpath import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = posixpath.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles))
<commit_before>#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import os import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = os.path.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles)) <commit_msg>Use posixpath for paths in the cloud. Fixes build break on Windows. R=borenet@google.com Review URL: https://codereview.chromium.org/18074002 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9792 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import posixpath import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = posixpath.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles))
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import os import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = os.path.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles)) Use posixpath for paths in the cloud. Fixes build break on Windows. R=borenet@google.com Review URL: https://codereview.chromium.org/18074002 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9792 2bbb7eff-a529-9590-31e7-b0007b416f81#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import posixpath import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = posixpath.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles))
<commit_before>#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import os import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = os.path.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles)) <commit_msg>Use posixpath for paths in the cloud. Fixes build break on Windows. R=borenet@google.com Review URL: https://codereview.chromium.org/18074002 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9792 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Download the image files needed to run skimage tool. """ from build_step import BuildStep from utils import gs_utils from utils import sync_bucket_subdir import posixpath import sys class DownloadSKImageFiles(BuildStep): def __init__(self, timeout=12800, no_output_timeout=9600, **kwargs): super (DownloadSKImageFiles, self).__init__( timeout=timeout, no_output_timeout=no_output_timeout, **kwargs) def _DownloadSKImagesFromStorage(self): """Copies over image files from Google Storage if the timestamps differ.""" dest_gsbase = (self._args.get('dest_gsbase') or sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE) print '\n\n========Downloading image files from Google Storage========\n\n' gs_relative_dir = posixpath.join('skimage', 'input') gs_utils.DownloadDirectoryContentsIfChanged( gs_base=dest_gsbase, gs_relative_dir=gs_relative_dir, local_dir=self._skimage_in_dir) def _Run(self): # Locally copy image files from GoogleStorage. self._DownloadSKImagesFromStorage() if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(DownloadSKImageFiles))
e2959ec01b25c3f447fdd31608b30f19c2dc3599
engine.py
engine.py
# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y)
# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) def _is_pos_on_board(coord): u"""Return True if coordinate is on the board.""" x, y = coord if (97 <= x <= 104) and (49 <= y <= 56): return True else: return False
Add _is_pos_on_board() to determine if a position is on the board
Add _is_pos_on_board() to determine if a position is on the board
Python
mit
EyuelAbebe/gamer,EyuelAbebe/gamer
# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) Add _is_pos_on_board() to determine if a position is on the board
# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) def _is_pos_on_board(coord): u"""Return True if coordinate is on the board.""" x, y = coord if (97 <= x <= 104) and (49 <= y <= 56): return True else: return False
<commit_before># Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) <commit_msg>Add _is_pos_on_board() to determine if a position is on the board<commit_after>
# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) def _is_pos_on_board(coord): u"""Return True if coordinate is on the board.""" x, y = coord if (97 <= x <= 104) and (49 <= y <= 56): return True else: return False
# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) Add _is_pos_on_board() to determine if a position is on the board# Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) def _is_pos_on_board(coord): u"""Return True if coordinate is on the board.""" x, y = coord if (97 <= x <= 104) and (49 <= y <= 56): return True else: return False
<commit_before># Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) <commit_msg>Add _is_pos_on_board() to determine if a position is on the board<commit_after># Use x, y coords for unit positions # (97, 56) ... (104, 56) # ... ... # (97, 49) ... (104, 49) # # Algebraic notation for a position is: # algebraic_pos = chr(x) + chr(y) def _coord_to_algebraic(coord): x, y = coord return chr(x) + chr(y) def _algebraic_to_coord(algebraic): x, y = algebraic[0], algebraic[1] return ord(x), ord(y) def _is_pos_on_board(coord): u"""Return True if coordinate is on the board.""" x, y = coord if (97 <= x <= 104) and (49 <= y <= 56): return True else: return False
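Editor's note (not part of the dataset): a short usage sketch for the helper added in the record above; the bounds 97-104 and 49-56 come straight from the coordinate comment in the code.

```python
# Assumes the engine.py definitions from the record above are in scope.
assert _is_pos_on_board(_algebraic_to_coord('a1'))   # bottom-left corner
assert _is_pos_on_board(_algebraic_to_coord('h8'))   # top-right corner
assert not _is_pos_on_board((96, 49))                # one file left of 'a'
assert not _is_pos_on_board((104, 57))               # one rank above '8'
```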
3671f6f1e3a2e518255a6a04d0aadf52d5fcca97
tests/functions_tests/test_dropout.py
tests/functions_tests/test_dropout.py
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__)
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) self.gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) def check_type_backward(self, x_data, gy_data): x = chainer.Variable(x_data) y = functions.dropout(x) y.grad = gy_data y.backward() def test_type_backward_cpu(self): self.check_type_backward(self.x, self.gy) def test_type_backward_gpu(self): self.check_type_backward( cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) testing.run_module(__name__, __file__)
Add unittest for type check during backward in TestDropout
Add unittest for type check during backward in TestDropout
Python
mit
benob/chainer,truongdq/chainer,ronekko/chainer,sou81821/chainer,t-abe/chainer,anaruse/chainer,kashif/chainer,okuta/chainer,sinhrks/chainer,niboshi/chainer,jnishi/chainer,woodshop/chainer,elviswf/chainer,ktnyt/chainer,keisuke-umezawa/chainer,wkentaro/chainer,wkentaro/chainer,niboshi/chainer,cupy/cupy,1986ks/chainer,jfsantos/chainer,yanweifu/chainer,muupan/chainer,t-abe/chainer,cupy/cupy,niboshi/chainer,keisuke-umezawa/chainer,Kaisuke5/chainer,keisuke-umezawa/chainer,chainer/chainer,kikusu/chainer,chainer/chainer,jnishi/chainer,kiyukuta/chainer,hvy/chainer,hidenori-t/chainer,wkentaro/chainer,ktnyt/chainer,wavelets/chainer,woodshop/complex-chainer,tscohen/chainer,wkentaro/chainer,aonotas/chainer,kikusu/chainer,tkerola/chainer,hvy/chainer,cemoody/chainer,keisuke-umezawa/chainer,pfnet/chainer,kuwa32/chainer,niboshi/chainer,ikasumi/chainer,hvy/chainer,minhpqn/chainer,jnishi/chainer,ytoyama/yans_chainer_hackathon,ysekky/chainer,laysakura/chainer,ktnyt/chainer,delta2323/chainer,benob/chainer,muupan/chainer,masia02/chainer,AlpacaDB/chainer,tigerneil/chainer,chainer/chainer,jnishi/chainer,rezoo/chainer,cupy/cupy,AlpacaDB/chainer,truongdq/chainer,okuta/chainer,hvy/chainer,chainer/chainer,sinhrks/chainer,umitanuki/chainer,ktnyt/chainer,okuta/chainer,okuta/chainer,cupy/cupy
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__) Add unittest for type check during backward in TestDropout
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) self.gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) def check_type_backward(self, x_data, gy_data): x = chainer.Variable(x_data) y = functions.dropout(x) y.grad = gy_data y.backward() def test_type_backward_cpu(self): self.check_type_backward(self.x, self.gy) def test_type_backward_gpu(self): self.check_type_backward( cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) testing.run_module(__name__, __file__)
<commit_before>import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__) <commit_msg>Add unittest for type check during backward in TestDropout<commit_after>
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) self.gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) def check_type_backward(self, x_data, gy_data): x = chainer.Variable(x_data) y = functions.dropout(x) y.grad = gy_data y.backward() def test_type_backward_cpu(self): self.check_type_backward(self.x, self.gy) def test_type_backward_gpu(self): self.check_type_backward( cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) testing.run_module(__name__, __file__)
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__) Add unittest for type check during backward in TestDropoutimport unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) self.gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) def check_type_backward(self, x_data, gy_data): x = chainer.Variable(x_data) y = functions.dropout(x) y.grad = gy_data y.backward() def test_type_backward_cpu(self): self.check_type_backward(self.x, self.gy) def test_type_backward_gpu(self): self.check_type_backward( cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) testing.run_module(__name__, __file__)
<commit_before>import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__) <commit_msg>Add unittest for type check during backward in TestDropout<commit_after>import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) self.gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) functions.dropout(x) def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) def check_type_backward(self, x_data, gy_data): x = chainer.Variable(x_data) y = functions.dropout(x) y.grad = gy_data y.backward() def test_type_backward_cpu(self): self.check_type_backward(self.x, self.gy) def test_type_backward_gpu(self): self.check_type_backward( cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) testing.run_module(__name__, __file__)
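Editor's note (not part of the dataset): unlike `test_type_forward_gpu`, the new `test_type_backward_gpu` in this record carries no `@attr.gpu` decorator, so it would also run on machines without CUDA and fail on `cuda.to_gpu`. That looks like an oversight in the original commit; a corrected sketch — my assumption, not what commit e2959ec actually contains — would read:

```python
@attr.gpu
def test_type_backward_gpu(self):
    # Marked gpu so the test is skipped on CUDA-less machines,
    # mirroring test_type_forward_gpu above.
    self.check_type_backward(
        cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
```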
5dcac8299e754a4209f6f4177eb0df8ecea914c1
namelist.py
namelist.py
#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print hex(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1])
#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print '0x{0:04X}'.format(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1])
Format codepoints according to the Namelist spec.
[tools] Format codepoints according to the Namelist spec.
Python
apache-2.0
googlefonts/gftools,googlefonts/gftools
#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print hex(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1]) [tools] Format codepoints according to the Namelist spec.
#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print '0x{0:04X}'.format(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1])
<commit_before>#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print hex(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1]) <commit_msg>[tools] Format codepoints according to the Namelist spec.<commit_after>
#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print '0x{0:04X}'.format(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1])
#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print hex(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1]) [tools] Format codepoints according to the Namelist spec.#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print '0x{0:04X}'.format(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1])
<commit_before>#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print hex(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1]) <commit_msg>[tools] Format codepoints according to the Namelist spec.<commit_after>#!/usr/bin/env python # Copyright 2015, Google Inc. # Author: Dave Crossland (dave@understandinglimited.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # namelist.py: A fontTools python script for generating namelist files # # Usage: # # $ namelist.py Font.ttf > NameList.nam import sys from fontTools.ttLib import TTFont from fontTools.unicode import Unicode def main(file_name): excluded_chars = ["????", "SPACE", "NO-BREAK SPACE"] font = TTFont(file_name) for cmap in font["cmap"].tables: char_list = sorted(cmap.cmap.items()) for item in char_list: item_description = Unicode[item[0]] if item_description not in excluded_chars: print '0x{0:04X}'.format(item[0]), item_description font.close() if __name__ == '__main__': main(sys.argv[1])
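Editor's note (not part of the dataset): the formatting change above is easiest to see side by side — `hex()` yields lowercase, unpadded output, while `'0x{0:04X}'` produces the zero-padded uppercase form the record says the Namelist spec expects:

```python
codepoint = 0xE9  # LATIN SMALL LETTER E WITH ACUTE
print(hex(codepoint))                 # 0xe9   (old output)
print('0x{0:04X}'.format(codepoint))  # 0x00E9 (Namelist-style output)
```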
8dcbb031aa00afc35900243142d8f49814834d19
powerline/renderers/ipython.py
powerline/renderers/ipython.py
# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] renderer = IpythonRenderer
# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] def shutdown(self): self.theme.shutdown() for match in self.local_themes.values(): if 'theme' in match: match['theme'].shutdown() renderer = IpythonRenderer
Make IPython renderer shutdown properly
Make IPython renderer shutdown properly
Python
mit
S0lll0s/powerline,IvanAli/powerline,Liangjianghao/powerline,cyrixhero/powerline,blindFS/powerline,bezhermoso/powerline,blindFS/powerline,EricSB/powerline,keelerm84/powerline,QuLogic/powerline,cyrixhero/powerline,darac/powerline,magus424/powerline,cyrixhero/powerline,dragon788/powerline,prvnkumar/powerline,keelerm84/powerline,DoctorJellyface/powerline,dragon788/powerline,firebitsbr/powerline,DoctorJellyface/powerline,s0undt3ch/powerline,dragon788/powerline,IvanAli/powerline,bartvm/powerline,s0undt3ch/powerline,xxxhycl2010/powerline,lukw00/powerline,darac/powerline,prvnkumar/powerline,EricSB/powerline,Luffin/powerline,lukw00/powerline,xxxhycl2010/powerline,blindFS/powerline,russellb/powerline,S0lll0s/powerline,firebitsbr/powerline,seanfisk/powerline,junix/powerline,lukw00/powerline,areteix/powerline,kenrachynski/powerline,xfumihiro/powerline,bezhermoso/powerline,russellb/powerline,DoctorJellyface/powerline,russellb/powerline,wfscheper/powerline,areteix/powerline,Liangjianghao/powerline,QuLogic/powerline,seanfisk/powerline,junix/powerline,xfumihiro/powerline,junix/powerline,darac/powerline,magus424/powerline,prvnkumar/powerline,Liangjianghao/powerline,wfscheper/powerline,xxxhycl2010/powerline,firebitsbr/powerline,kenrachynski/powerline,s0undt3ch/powerline,areteix/powerline,magus424/powerline,kenrachynski/powerline,Luffin/powerline,Luffin/powerline,wfscheper/powerline,EricSB/powerline,seanfisk/powerline,bartvm/powerline,QuLogic/powerline,S0lll0s/powerline,bartvm/powerline,xfumihiro/powerline,IvanAli/powerline,bezhermoso/powerline
# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] renderer = IpythonRenderer Make IPython renderer shutdown properly
# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] def shutdown(self): self.theme.shutdown() for match in self.local_themes.values(): if 'theme' in match: match['theme'].shutdown() renderer = IpythonRenderer
<commit_before># vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] renderer = IpythonRenderer <commit_msg>Make IPython renderer shutdown properly<commit_after>
# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] def shutdown(self): self.theme.shutdown() for match in self.local_themes.values(): if 'theme' in match: match['theme'].shutdown() renderer = IpythonRenderer
# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] renderer = IpythonRenderer Make IPython renderer shutdown properly# vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] def shutdown(self): self.theme.shutdown() for match in self.local_themes.values(): if 'theme' in match: match['theme'].shutdown() renderer = IpythonRenderer
<commit_before># vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] renderer = IpythonRenderer <commit_msg>Make IPython renderer shutdown properly<commit_after># vim:fileencoding=utf-8:noet from powerline.renderers.shell import ShellRenderer from powerline.theme import Theme class IpythonRenderer(ShellRenderer): '''Powerline ipython segment renderer.''' escape_hl_start = '\x01' escape_hl_end = '\x02' def get_segment_info(self, segment_info): r = self.segment_info.copy() r['ipython'] = segment_info return r def get_theme(self, matcher_info): if matcher_info == 'in': return self.theme else: match = self.local_themes[matcher_info] try: return match['theme'] except KeyError: match['theme'] = Theme(theme_config=match['config'], top_theme_config=self.theme_config, **self.theme_kwargs) return match['theme'] def shutdown(self): self.theme.shutdown() for match in self.local_themes.values(): if 'theme' in match: match['theme'].shutdown() renderer = IpythonRenderer
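Editor's note (not part of the dataset): the `'theme' in match` guard in the new `shutdown` matters because `get_theme` builds each local theme lazily and caches it in its match dict, so only entries that were actually rendered have anything to shut down. A self-contained sketch of that pattern, using a hypothetical `FakeTheme` stand-in:

```python
class FakeTheme(object):
    def __init__(self):
        self.closed = False

    def shutdown(self):
        self.closed = True

local_themes = {'out': {'config': {}}, 'rewrite': {'config': {}}}
local_themes['out']['theme'] = FakeTheme()  # only 'out' was ever rendered

for match in local_themes.values():
    if 'theme' in match:                    # skip never-built themes
        match['theme'].shutdown()

assert local_themes['out']['theme'].closed
assert 'theme' not in local_themes['rewrite']
```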
7cbc6ae58357ef647a007e1b505884e523d924c2
numba/tests/test_ctypes_call.py
numba/tests/test_ctypes_call.py
import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] assert call_ctypes_func(puts, "Hello World!") libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
Fix ctypes call test for windows
Fix ctypes call test for windows
Python
bsd-2-clause
sklam/numba,pitrou/numba,GaZ3ll3/numba,numba/numba,jriehl/numba,sklam/numba,IntelLabs/numba,IntelLabs/numba,jriehl/numba,pitrou/numba,shiquanwang/numba,gdementen/numba,stonebig/numba,pombredanne/numba,gmarkall/numba,cpcloud/numba,stonebig/numba,jriehl/numba,ssarangi/numba,gdementen/numba,stuartarchibald/numba,ssarangi/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,ssarangi/numba,gmarkall/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,sklam/numba,gdementen/numba,stuartarchibald/numba,jriehl/numba,stefanseefeld/numba,jriehl/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,cpcloud/numba,stonebig/numba,seibert/numba,pitrou/numba,numba/numba,ssarangi/numba,stefanseefeld/numba,stonebig/numba,seibert/numba,pitrou/numba,pombredanne/numba,pombredanne/numba,IntelLabs/numba,stefanseefeld/numba,pitrou/numba,numba/numba,GaZ3ll3/numba,GaZ3ll3/numba,shiquanwang/numba,IntelLabs/numba,stuartarchibald/numba,pombredanne/numba,gdementen/numba,seibert/numba,shiquanwang/numba,sklam/numba,stefanseefeld/numba,sklam/numba,numba/numba,numba/numba,gdementen/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,stefanseefeld/numba,seibert/numba,ssarangi/numba,pombredanne/numba
import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] assert call_ctypes_func(puts, "Hello World!") libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()Fix ctypes call test for windows
import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
<commit_before>import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] assert call_ctypes_func(puts, "Hello World!") libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()<commit_msg>Fix ctypes call test for windows<commit_after>
import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] assert call_ctypes_func(puts, "Hello World!") libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()Fix ctypes call test for windowsimport os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
<commit_before>import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] assert call_ctypes_func(puts, "Hello World!") libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()<commit_msg>Fix ctypes call test for windows<commit_after>import os import ctypes from numba import * @autojit(backend='ast', nopython=True) def call_ctypes_func(func, value): return func(value) def test_ctypes_calls(): # Test puts for no segfault libc = ctypes.CDLL(ctypes.util.find_library('c')) puts = libc.puts puts.argtypes = [ctypes.c_char_p] call_ctypes_func(puts, "Hello World!") # Test ceil result libm = ctypes.CDLL(ctypes.util.find_library('m')) ceil = libm.ceil ceil.argtypes = [ctypes.c_double] ceil.restype = ctypes.c_double assert call_ctypes_func(ceil, 10.1) == 11.0 def test_str_return(): try: import errno except ImportError: return libc = ctypes.CDLL(ctypes.util.find_library('c')) strerror = libc.strerror strerror.argtypes = [ctypes.c_int] strerror.restype = ctypes.c_char_p expected = os.strerror(errno.EACCES) got = call_ctypes_func(strerror, errno.EACCES) assert expected == got if __name__ == "__main__": test_ctypes_calls() # test_str_return()
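Editor's note (not part of the dataset), two hedged observations on this record. First, the test calls `ctypes.util.find_library` while importing only `ctypes`; `ctypes.util` is a submodule that normally needs its own import, so the sketch below imports it explicitly. Second, one plausible reading of the Windows fix is that `puts()` is only guaranteed to return a nonnegative value on success, so asserting on its raw return value can fail legitimately:

```python
import ctypes
import ctypes.util  # explicit import; not guaranteed by `import ctypes` alone

libc_path = ctypes.util.find_library('c')
if libc_path is not None:  # find_library may return None, e.g. on some Windows setups
    libc = ctypes.CDLL(libc_path)
    libc.puts.argtypes = [ctypes.c_char_p]
    rc = libc.puts(b'Hello World!')
    assert rc >= 0  # portable success check; `assert rc` would reject a legal 0
```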
65050e11fd951968b100640c503c0ca7999283c0
templates/quantum_conf_template.py
templates/quantum_conf_template.py
import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """)
import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """)
Add the new contrail extension to neutron plugin config file
Add the new contrail extension to neutron plugin config file Change-Id: I2a1e90a2ca31314b7a214943b0f312471b11da9f
Python
apache-2.0
Juniper/contrail-provisioning,Juniper/contrail-provisioning
import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """) Add the new contrail extension to neutron plugin config file Change-Id: I2a1e90a2ca31314b7a214943b0f312471b11da9f
import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """)
<commit_before>import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """) <commit_msg>Add the new contrail extension to neutron plugin config file Change-Id: I2a1e90a2ca31314b7a214943b0f312471b11da9f<commit_after>
import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """)
import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """) Add the new contrail extension to neutron plugin config file Change-Id: I2a1e90a2ca31314b7a214943b0f312471b11da9fimport string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """)
<commit_before>import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """) <commit_msg>Add the new contrail extension to neutron plugin config file Change-Id: I2a1e90a2ca31314b7a214943b0f312471b11da9f<commit_after>import string template = string.Template(""" [APISERVER] api_server_ip = $__contrail_api_server_ip__ api_server_port = $__contrail_api_server_port__ multi_tenancy = $__contrail_multi_tenancy__ contrail_extensions = ipam:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_ipam.NeutronPluginContrailIpam,policy:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_policy.NeutronPluginContrailPolicy,route-table:neutron_plugin_contrail.plugins.opencontrail.contrail_plugin_vpc.NeutronPluginContrailVpc,contrail:None [KEYSTONE] ;auth_url = $__contrail_ks_auth_protocol__://$__contrail_keystone_ip__:$__contrail_ks_auth_port__/v2.0 ;admin_token = $__contrail_admin_token__ admin_user=$__contrail_admin_user__ admin_password=$__contrail_admin_password__ admin_tenant_name=$__contrail_admin_tenant_name__ """)
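Editor's note (not part of the dataset): a sketch of how a `contrail_extensions` value like the one above decomposes into alias/class pairs. The `parse_extensions` helper is hypothetical — illustration only, not Neutron's actual loader — and it treats the literal string `None` (the value added for the `contrail` alias) as an alias registered without a dedicated extension class:

```python
def parse_extensions(raw):
    """Split 'alias:class,alias:class' into a dict; 'None' means no class."""
    exts = {}
    for item in raw.split(','):
        alias, _, cls = item.partition(':')
        exts[alias.strip()] = None if cls == 'None' else cls
    return exts

parsed = parse_extensions('ipam:pkg.Ipam,policy:pkg.Policy,contrail:None')
assert parsed['contrail'] is None
assert parsed['ipam'] == 'pkg.Ipam'
```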
687c940a6c83564c14299976755b58c3c3f35dfc
bot/api/telegram.py
bot/api/telegram.py
import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass
import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params, timeout=60) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass
Add timeout to requests so that they don't get dead-locked
Add timeout to requests so that they don't get dead-locked
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass Add timeout to requests so that they don't get dead-locked
import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params, timeout=60) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass
<commit_before>import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass <commit_msg>Add timeout to requests so that they don't get dead-locked<commit_after>
import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params, timeout=60) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass
import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass Add timeout to requests so that they don't get dead-lockedimport requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params, timeout=60) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass
<commit_before>import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass <commit_msg>Add timeout to requests so that they don't get dead-locked<commit_after>import requests from bot.api.domain import ApiObject class TelegramBotApi: def __init__(self, auth_token, debug: bool): self.base_url = "https://api.telegram.org/bot" + auth_token + "/" self.debug = debug def get_me(self): return self.__send_request("getMe") def send_message(self, chat_id, text): return self.__send_request("sendMessage", chat_id=chat_id, text=text) def get_updates(self, offset=None, timeout=None): return self.__send_request("getUpdates", offset=offset, timeout=timeout) def __send_request(self, command, **params): request = requests.get(self.base_url + command, params=params, timeout=60) self.__log_request(request) response = request.json() self.__log_response(response) if not response["ok"]: raise TelegramBotApiException(response["description"]) return ApiObject.wrap_api_object(response["result"]) def __log_request(self, request): if self.debug: print(">> " + request.url) def __log_response(self, response): if self.debug: print("<< " + str(response)) class TelegramBotApiException(Exception): pass
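For context on the timeout change above, a small self-contained sketch (the token placeholder is made up, not taken from the record) of how requests surfaces a timeout as an exception instead of hanging:

import requests

try:
    # Without timeout=..., a stalled server could block this call indefinitely;
    # with it, requests raises rather than dead-locking the polling loop.
    requests.get("https://api.telegram.org/bot<TOKEN>/getUpdates", timeout=60)
except requests.exceptions.Timeout:
    print("request timed out instead of hanging")

Note that requests' timeout bounds the connect and per-read waits, not the total transfer time.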
cadd2539d5468041599d86ffc94e7a39d83c8759
croquemort/migrations.py
croquemort/migrations.py
from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type)
from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if data and urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type)
Fix an access key `url` bug if data is empty
Fix an access key `url` bug if data is empty
Python
mit
davidbgk/croquemort,opendatateam/croquemort,opendatateam/croquemort,davidbgk/croquemort
from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type) Fix an access key `url` bug if data is empty
from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if data and urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type)
<commit_before>from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type) <commit_msg>Fix an access key `url` bug if data is empty<commit_after>
from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if data and urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type)
from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type) Fix an access key `url` bug if data is emptyfrom urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if data and urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type)
<commit_before>from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type) <commit_msg>Fix an access key `url` bug if data is empty<commit_after>from urllib.parse import urlparse import logbook from nameko.rpc import rpc from .logger import LoggingDependency from .storages import RedisStorage log = logbook.debug class MigrationsService(object): name = 'migrations' storage = RedisStorage() logger = LoggingDependency(interval='ms') @rpc def delete_urls_for(self, domain): log('Deleting URLs for domain {domain}'.format(domain=domain)) for url_hash, data in self.storage.get_all_urls(): if data and urlparse(data['url']).netloc == domain: self.storage.delete_url(url_hash) @rpc def split_content_types(self): log('Splitting content types') for url_hash, data in self.storage.get_all_urls(): content_type = data.get('content-type') if content_type and ';' in content_type: self.storage.store_content_type(url_hash, content_type)
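A tiny standalone illustration (the sample dicts are fabricated) of why the added `data and ...` guard avoids a KeyError on empty hashes:

from urllib.parse import urlparse

for data in ({}, {"url": "http://example.org/page"}):
    # An empty dict is falsy, so `and` short-circuits before data["url"] is read.
    if data and urlparse(data["url"]).netloc == "example.org":
        print("matched", data["url"])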
540a589a3d20b4841e9a9c936673cc30a2f0a9ff
csportal/portal/views.py
csportal/portal/views.py
from django.shortcuts import render def home(request): return render(request, 'portal/home.html')
from django.shortcuts import render def home(request): return render(request, 'portal/home.html') def about(request): return render(request, 'portal/home.html')
Add an about function (view) that renders the about page when it is requested
Add an about function (view) that renders the about page when it is requested
Python
mit
utailab/cs-portal,utailab/cs-portal
from django.shortcuts import render def home(request): return render(request, 'portal/home.html') Add an about function (view) that renders the about page when it is requested
from django.shortcuts import render def home(request): return render(request, 'portal/home.html') def about(request): return render(request, 'portal/home.html')
<commit_before>from django.shortcuts import render def home(request): return render(request, 'portal/home.html') <commit_msg>Add an about function (view) that renders the about page when it is requested<commit_after>
from django.shortcuts import render def home(request): return render(request, 'portal/home.html') def about(request): return render(request, 'portal/home.html')
from django.shortcuts import render def home(request): return render(request, 'portal/home.html') Add an about function (view) that renders the about page when it is requestedfrom django.shortcuts import render def home(request): return render(request, 'portal/home.html') def about(request): return render(request, 'portal/home.html')
<commit_before>from django.shortcuts import render def home(request): return render(request, 'portal/home.html') <commit_msg>Add an about function (view) that renders the about page when it is requested<commit_after>from django.shortcuts import render def home(request): return render(request, 'portal/home.html') def about(request): return render(request, 'portal/home.html')
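The new view still needs a route before it is reachable; a hypothetical urls.py wiring (the record does not show the URLconf, and the modern path() helper is an assumption, older Django versions would use url() with a regex):

from django.urls import path
from portal import views

urlpatterns = [
    path("", views.home, name="home"),
    path("about/", views.about, name="about"),  # routes to the view added above
]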
5e86c516927bca9089541fdc3b60616bee8ec117
scripts/analytics/tabulate_emails.py
scripts/analytics/tabulate_emails.py
# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' USER_ID = 'icpnw' FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'date_confirmed': { '$gte': datetime.datetime.utcnow() - delta, } }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main()
# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' # Daily updates project USER_ID = 'icpnw' # Josh FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'is_registered': True, 'password': {'$ne': None}, 'is_merged': {'$ne': True}, 'date_confirmed': {'$gte': datetime.datetime.utcnow() - delta}, }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main()
Make user metric criteria consistent.
Make user metric criteria consistent. [Resolves #1768]
Python
apache-2.0
chennan47/osf.io,zachjanicki/osf.io,jolene-esposito/osf.io,haoyuchen1992/osf.io,Nesiehr/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,felliott/osf.io,cwisecarver/osf.io,caneruguz/osf.io,GageGaskins/osf.io,pattisdr/osf.io,Nesiehr/osf.io,kwierman/osf.io,jolene-esposito/osf.io,jmcarp/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,samchrisinger/osf.io,mattclark/osf.io,reinaH/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,adlius/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,sloria/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,samchrisinger/osf.io,mattclark/osf.io,reinaH/osf.io,billyhunt/osf.io,chennan47/osf.io,samchrisinger/osf.io,leb2dg/osf.io,ckc6cz/osf.io,brandonPurvis/osf.io,lyndsysimon/osf.io,erinspace/osf.io,felliott/osf.io,mluke93/osf.io,reinaH/osf.io,danielneis/osf.io,monikagrabowska/osf.io,kwierman/osf.io,zamattiac/osf.io,billyhunt/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,caneruguz/osf.io,danielneis/osf.io,zachjanicki/osf.io,bdyetton/prettychart,jmcarp/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,cldershem/osf.io,amyshi188/osf.io,RomanZWang/osf.io,barbour-em/osf.io,icereval/osf.io,aaxelb/osf.io,rdhyee/osf.io,jinluyuan/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,GaryKriebel/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,lyndsysimon/osf.io,wearpants/osf.io,mluo613/osf.io,amyshi188/osf.io,zamattiac/osf.io,haoyuchen1992/osf.io,lamdnhan/osf.io,fabianvf/osf.io,abought/osf.io,emetsger/osf.io,KAsante95/osf.io,cosenal/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,caseyrygt/osf.io,njantrania/osf.io,cosenal/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,dplorimer/osf,acshi/osf.io,RomanZWang/osf.io,kushG/osf.io,chrisseto/osf.io,erinspace/osf.io,Nesiehr/osf.io,SSJohns/osf.io,TomBaxter/osf.io,jeffreyliu3230/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,acshi/osf.io,bdyetton/prettychart,Johnetordoff/osf.io,mluo613/osf.io,ckc6cz/osf.io,zamattiac/osf.io,lamdnhan/osf.io,fabianvf/osf.io,chrisseto/osf.io,pattisdr/osf.io,caseyrollins/osf.io,lyndsysimon/osf.io,asanfilippo7/osf.io,chennan47/osf.io,arpitar/osf.io,jinluyuan/osf.io,hmoco/osf.io,arpitar/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,sloria/osf.io,caseyrygt/osf.io,HarryRybacki/osf.io,kch8qx/osf.io,Ghalko/osf.io,jeffreyliu3230/osf.io,wearpants/osf.io,dplorimer/osf,baylee-d/osf.io,wearpants/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,cldershem/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,lamdnhan/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,rdhyee/osf.io,caseyrollins/osf.io,doublebits/osf.io,crcresearch/osf.io,amyshi188/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,SSJohns/osf.io,zamattiac/osf.io,GaryKriebel/osf.io,TomBaxter/osf.io,HarryRybacki/osf.io,cldershem/osf.io,alexschiller/osf.io,zachjanicki/osf.io,mluke93/osf.io,cwisecarver/osf.io,zkraime/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,acshi/osf.io,himanshuo/osf.io
# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' USER_ID = 'icpnw' FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'date_confirmed': { '$gte': datetime.datetime.utcnow() - delta, } }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main() Make user metric criteria consistent. [Resolves #1768]
# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' # Daily updates project USER_ID = 'icpnw' # Josh FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'is_registered': True, 'password': {'$ne': None}, 'is_merged': {'$ne': True}, 'date_confirmed': {'$gte': datetime.datetime.utcnow() - delta}, }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main()
<commit_before># -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' USER_ID = 'icpnw' FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'date_confirmed': { '$gte': datetime.datetime.utcnow() - delta, } }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main() <commit_msg>Make user metric criteria consistent. [Resolves #1768]<commit_after>
# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' # Daily updates project USER_ID = 'icpnw' # Josh FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'is_registered': True, 'password': {'$ne': None}, 'is_merged': {'$ne': True}, 'date_confirmed': {'$gte': datetime.datetime.utcnow() - delta}, }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main()
# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' USER_ID = 'icpnw' FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'date_confirmed': { '$gte': datetime.datetime.utcnow() - delta, } }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main() Make user metric criteria consistent. [Resolves #1768]# -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' # Daily updates project USER_ID = 'icpnw' # Josh FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'is_registered': True, 'password': {'$ne': None}, 'is_merged': {'$ne': True}, 'date_confirmed': {'$gte': datetime.datetime.utcnow() - delta}, }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main()
<commit_before># -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' USER_ID = 'icpnw' FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'date_confirmed': { '$gte': datetime.datetime.utcnow() - delta, } }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main() <commit_msg>Make user metric criteria consistent. [Resolves #1768]<commit_after># -*- coding: utf-8 -*- """Scripts for counting recently added users by email domain; pushes results to the specified project. """ import datetime import collections from cStringIO import StringIO from dateutil.relativedelta import relativedelta from framework.mongo import database from website import models from website.app import app, init_app from scripts.analytics import utils NODE_ID = '95nv8' # Daily updates project USER_ID = 'icpnw' # Josh FILE_NAME = 'daily-users.csv' CONTENT_TYPE = 'text/csv' TIME_DELTA = relativedelta(days=1) def get_emails(query=None): users = database['user'].find(query, {'username': True}) counts = collections.Counter( user['username'].split('@')[-1] for user in users ) return counts.most_common() def get_emails_since(delta): return get_emails({ 'is_registered': True, 'password': {'$ne': None}, 'is_merged': {'$ne': True}, 'date_confirmed': {'$gte': datetime.datetime.utcnow() - delta}, }) def main(): node = models.Node.load(NODE_ID) user = models.User.load(USER_ID) emails = get_emails_since(TIME_DELTA) sio = StringIO() utils.make_csv(sio, emails, ['affiliation', 'count']) utils.send_file(app, FILE_NAME, CONTENT_TYPE, sio, node, user) if __name__ == '__main__': init_app() main()
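As a standalone sketch of the domain-counting pattern used by get_emails above (the addresses are fabricated sample data, not real users):

import collections

usernames = ["a@osf.io", "b@cos.io", "c@osf.io"]
counts = collections.Counter(name.split("@")[-1] for name in usernames)
print(counts.most_common())  # [('osf.io', 2), ('cos.io', 1)]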
2ed36e3f99d0dfb1f66e141f96a0eec79a81c7a5
tdb/concatenate.py
tdb/concatenate.py
import argparse parser = argparse.ArgumentParser() parser.add_argument('-f', '--files', nargs='*', default=[], help="tsvs that will be concatenated") parser.add_argument('-o', '--output', type=str, default="data/titers_complete.tsv") def concat(files,out): with open(out, 'w') as o: for filename in files: print "Concatenating and annotating %s into %s." % (filename, out) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "none" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "egg" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): print line line = line.strip() l = "%s\t%s\t%s\n" % (line, source, passage) o.write(l) if __name__=="__main__": args = parser.parse_args() concat(args.files, args.output)
import argparse parser = argparse.ArgumentParser() parser.add_argument('files', nargs='+', default=[], help="tsvs that will be concatenated") def concat(files): for filename in files: print "Concatenating and annotating %s." % (filename) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "unknown" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "cell" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): line = line.strip() l = "%s\t%s\t%s" % (line, source, passage) print l if __name__=="__main__": args = parser.parse_args() concat(args.files)
Change input method, write to stdout and fix minor issues.
Change input method, write to stdout and fix minor issues.
Python
agpl-3.0
blab/nextstrain-db,nextstrain/fauna,blab/nextstrain-db,nextstrain/fauna
import argparse parser = argparse.ArgumentParser() parser.add_argument('-f', '--files', nargs='*', default=[], help="tsvs that will be concatenated") parser.add_argument('-o', '--output', type=str, default="data/titers_complete.tsv") def concat(files,out): with open(out, 'w') as o: for filename in files: print "Concatenating and annotating %s into %s." % (filename, out) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "none" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "egg" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): print line line = line.strip() l = "%s\t%s\t%s\n" % (line, source, passage) o.write(l) if __name__=="__main__": args = parser.parse_args() concat(args.files, args.output) Change input method, write to stdout and fix minor issues.
import argparse parser = argparse.ArgumentParser() parser.add_argument('files', nargs='+', default=[], help="tsvs that will be concatenated") def concat(files): for filename in files: print "Concatenating and annotating %s." % (filename) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "unknown" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "cell" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): line = line.strip() l = "%s\t%s\t%s" % (line, source, passage) print l if __name__=="__main__": args = parser.parse_args() concat(args.files)
<commit_before>import argparse parser = argparse.ArgumentParser() parser.add_argument('-f', '--files', nargs='*', default=[], help="tsvs that will be concatenated") parser.add_argument('-o', '--output', type=str, default="data/titers_complete.tsv") def concat(files,out): with open(out, 'w') as o: for filename in files: print "Concatenating and annotating %s into %s." % (filename, out) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "none" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "egg" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): print line line = line.strip() l = "%s\t%s\t%s\n" % (line, source, passage) o.write(l) if __name__=="__main__": args = parser.parse_args() concat(args.files, args.output) <commit_msg>Change input method, write to stdout and fix minor issues.<commit_after>
import argparse parser = argparse.ArgumentParser() parser.add_argument('files', nargs='+', default=[], help="tsvs that will be concatenated") def concat(files): for filename in files: print "Concatenating and annotating %s." % (filename) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "unknown" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "cell" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): line = line.strip() l = "%s\t%s\t%s" % (line, source, passage) print l if __name__=="__main__": args = parser.parse_args() concat(args.files)
import argparse parser = argparse.ArgumentParser() parser.add_argument('-f', '--files', nargs='*', default=[], help="tsvs that will be concatenated") parser.add_argument('-o', '--output', type=str, default="data/titers_complete.tsv") def concat(files,out): with open(out, 'w') as o: for filename in files: print "Concatenating and annotating %s into %s." % (filename, out) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "none" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "egg" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): print line line = line.strip() l = "%s\t%s\t%s\n" % (line, source, passage) o.write(l) if __name__=="__main__": args = parser.parse_args() concat(args.files, args.output) Change input method, write to stdout and fix minor issues.import argparse parser = argparse.ArgumentParser() parser.add_argument('files', nargs='+', default=[], help="tsvs that will be concatenated") def concat(files): for filename in files: print "Concatenating and annotating %s." % (filename) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "unknown" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "cell" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): line = line.strip() l = "%s\t%s\t%s" % (line, source, passage) print l if __name__=="__main__": args = parser.parse_args() concat(args.files)
<commit_before>import argparse parser = argparse.ArgumentParser() parser.add_argument('-f', '--files', nargs='*', default=[], help="tsvs that will be concatenated") parser.add_argument('-o', '--output', type=str, default="data/titers_complete.tsv") def concat(files,out): with open(out, 'w') as o: for filename in files: print "Concatenating and annotating %s into %s." % (filename, out) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "none" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "egg" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): print line line = line.strip() l = "%s\t%s\t%s\n" % (line, source, passage) o.write(l) if __name__=="__main__": args = parser.parse_args() concat(args.files, args.output) <commit_msg>Change input method, write to stdout and fix minor issues.<commit_after>import argparse parser = argparse.ArgumentParser() parser.add_argument('files', nargs='+', default=[], help="tsvs that will be concatenated") def concat(files): for filename in files: print "Concatenating and annotating %s." % (filename) if "cdc" in filename.lower(): source = "cdc" elif "crick" in filename.lower(): source = "crick" else: source = "unknown" if "egg" in filename.lower(): passage = "egg" elif "cell" in filename.lower(): passage = "cell" else: passage = "none" with open(filename, 'r') as f: for line in f.readlines(): line = line.strip() l = "%s\t%s\t%s" % (line, source, passage) print l if __name__=="__main__": args = parser.parse_args() concat(args.files)
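The switch to a positional nargs='+' argument plus printing to stdout changes how the script is driven; a minimal sketch (the file names are invented):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("files", nargs="+")  # at least one file is now required
args = parser.parse_args(["cdc_egg.tsv", "crick_cell.tsv"])
print(args.files)  # ['cdc_egg.tsv', 'crick_cell.tsv']

# Shell usage would then redirect stdout, e.g.:
#   python concatenate.py cdc_egg.tsv crick_cell.tsv > titers_complete.tsv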
d5966f60491c408eede66221bc820e4ede93fc0c
pyramid_keystone/__init__.py
pyramid_keystone/__init__.py
default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register)
default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
Add directive allowing users to use our auth policy
Add directive allowing users to use our auth policy
Python
isc
bertjwregeer/pyramid_keystone
default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) Add directive allowing users to use our auth policy
default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
<commit_before> default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) <commit_msg>Add directive allowing users to use our auth policy<commit_after>
default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) Add directive allowing users to use our auth policy default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
<commit_before> default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) <commit_msg>Add directive allowing users to use our auth policy<commit_after> default_settings = [ ('auth_url', str, 'http://localhost:5000/v3'), ('region', str, 'RegionOne'), ('user_domain_name', str, 'Default'), ('cacert', str, ''), ] def parse_settings(settings): parsed = {} def populate(name, convert, default): sname = '%s%s' % ('keystone.', name) value = convert(settings.get(sname, default)) parsed[sname] = value for name, convert, default in default_settings: populate(name, convert, default) return parsed def includeme(config): """ Set up standard configurator registrations. Use via: .. code-block:: python config = Configurator() config.include('pyramid_keystone') """ # We use an action so that the user can include us, and then add the # required variables, upon commit we will pick up those changes. def register(): registry = config.registry settings = parse_settings(registry.settings) registry.settings.update(settings) config.action('keystone-configure', register) config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
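A hypothetical consumer of the new directive (the directive's arguments are not shown in the record, so it is called bare here as an assumption):

from pyramid.config import Configurator

config = Configurator(settings={"keystone.auth_url": "http://localhost:5000/v3"})
config.include("pyramid_keystone")      # runs includeme() above
config.keystone_auth_policy()           # directive registered via add_directive
app = config.make_wsgi_app()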
47f46e3237ba2f746193e9074136f805e71bacec
pysteps/cascade/interface.py
pysteps/cascade/interface.py
from pysteps.cascade import decomposition, bandpass_filters _cascade_methods = dict() _cascade_methods['fft'] = decomposition.decomposition_fft _cascade_methods['gaussian'] = bandpass_filters.filter_gaussian _cascade_methods['uniform'] = bandpass_filters.filter_uniform def get_method(name): """ Return a callable function for the bandpass filter or decomposition method corresponding to the given name.\n Filter methods: +-------------------+------------------------------------------------------+ | Name | Description | +===================+======================================================+ | gaussian | implementation of a bandpass filter using Gaussian | | | weights | +-------------------+------------------------------------------------------+ | uniform | implementation of a filter where all weights are set | | | to one | +-------------------+------------------------------------------------------+ Decomposition methods: +-------------------+------------------------------------------------------+ | Name | Description | +===================+======================================================+ | fft | decomposition based on Fast Fourier Transform (FFT) | | | and a bandpass filter | +-------------------+------------------------------------------------------+ """ if isinstance(name, str): name = name.lower() try: return _cascade_methods[name] except KeyError: raise ValueError("Unknown method {}\n".format(name) + "The available methods are:" + str(list(_cascade_methods.keys()))) from None
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()
    else:
        raise TypeError("Only strings supported for the method's names.\n"
                        + "Available names:"
                        + str(list(_cascade_methods.keys()))) from None

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
Raise exception on incorrect argument type
Raise exception on incorrect argument type
Python
bsd-3-clause
pySTEPS/pysteps
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None

Raise exception on incorrect argument type
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()
    else:
        raise TypeError("Only strings supported for the method's names.\n"
                        + "Available names:"
                        + str(list(_cascade_methods.keys()))) from None

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
<commit_before>
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
<commit_msg>Raise exception on incorrect argument type<commit_after>
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()
    else:
        raise TypeError("Only strings supported for the method's names.\n"
                        + "Available names:"
                        + str(list(_cascade_methods.keys()))) from None

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None

Raise exception on incorrect argument typefrom pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()
    else:
        raise TypeError("Only strings supported for the method's names.\n"
                        + "Available names:"
                        + str(list(_cascade_methods.keys()))) from None

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
<commit_before>
from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
<commit_msg>Raise exception on incorrect argument type<commit_after>from pysteps.cascade import decomposition, bandpass_filters

_cascade_methods = dict()
_cascade_methods['fft'] = decomposition.decomposition_fft
_cascade_methods['gaussian'] = bandpass_filters.filter_gaussian
_cascade_methods['uniform'] = bandpass_filters.filter_uniform


def get_method(name):
    """
    Return a callable function for the bandpass filter or decomposition method
    corresponding to the given name.\n

    Filter methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | gaussian          | implementation of a bandpass filter using Gaussian   |
    |                   | weights                                              |
    +-------------------+------------------------------------------------------+
    | uniform           | implementation of a filter where all weights are set |
    |                   | to one                                               |
    +-------------------+------------------------------------------------------+

    Decomposition methods:

    +-------------------+------------------------------------------------------+
    |       Name        |                     Description                      |
    +===================+======================================================+
    | fft               | decomposition based on Fast Fourier Transform (FFT)  |
    |                   | and a bandpass filter                                |
    +-------------------+------------------------------------------------------+
    """
    if isinstance(name, str):
        name = name.lower()
    else:
        raise TypeError("Only strings supported for the method's names.\n"
                        + "Available names:"
                        + str(list(_cascade_methods.keys()))) from None

    try:
        return _cascade_methods[name]
    except KeyError:
        raise ValueError("Unknown method {}\n".format(name)
                         + "The available methods are:"
                         + str(list(_cascade_methods.keys()))) from None
ef75047fa9bd0d4bc5dd6c263f399f446827daab
radar/lib/models/__init__.py
radar/lib/models/__init__.py
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
Add family history and pathology client-side
Add family history and pathology client-side
Python
agpl-3.0
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *

Add family history and pathology client-side
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
<commit_before>from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
<commit_msg>Add family history and pathology client-side<commit_after>
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *

Add family history and pathology client-sidefrom radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
<commit_before>from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
<commit_msg>Add family history and pathology client-side<commit_after>from radar.lib.models.cohorts import *
from radar.lib.models.common import *
from radar.lib.models.comorbidities import *
from radar.lib.models.diagnosis import *
from radar.lib.models.dialysis import *
from radar.lib.models.data_sources import *
from radar.lib.models.family_history import *
from radar.lib.models.genetics import *
from radar.lib.models.hospitalisations import *
from radar.lib.models.results import *
from radar.lib.models.medications import *
from radar.lib.models.organisations import *
from radar.lib.models.posts import *
from radar.lib.models.pathology import *
from radar.lib.models.patients import *
from radar.lib.models.patient_addresses import *
from radar.lib.models.patient_aliases import *
from radar.lib.models.patient_demographics import *
from radar.lib.models.patient_numbers import *
from radar.lib.models.plasmapheresis import *
from radar.lib.models.renal_imaging import *
from radar.lib.models.salt_wasting import *
from radar.lib.models.transplants import *
from radar.lib.models.users import *
88a11dc4bffccbdb585c2b09dc3eef0a0d2f6a59
config/settings_production.py
config/settings_production.py
""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ '193.175.133.246', 'localhost', ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip()
""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ "services-stage.policycompass.eu", "services-prod.policycompass.eu", "localhost" ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip()
Configure allowed host for productions setup
Configure allowed host for productions setup
Python
agpl-3.0
policycompass/policycompass-services,mmilaprat/policycompass-services,policycompass/policycompass-services,mmilaprat/policycompass-services,policycompass/policycompass-services,mmilaprat/policycompass-services
""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ '193.175.133.246', 'localhost', ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip() Configure allowed host for productions setup
""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ "services-stage.policycompass.eu", "services-prod.policycompass.eu", "localhost" ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip()
<commit_before>""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ '193.175.133.246', 'localhost', ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip() <commit_msg>Configure allowed host for productions setup<commit_after>
""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ "services-stage.policycompass.eu", "services-prod.policycompass.eu", "localhost" ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip()
""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ '193.175.133.246', 'localhost', ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip() Configure allowed host for productions setup""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ "services-stage.policycompass.eu", "services-prod.policycompass.eu", "localhost" ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip()
<commit_before>""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ '193.175.133.246', 'localhost', ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip() <commit_msg>Configure allowed host for productions setup<commit_after>""" Django settings for pc_datamanger project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ import os from .settings_basic import * from .settings import * DEBUG = False TEMPLATE_DEBUG = False ALLOWED_HOSTS = [ "services-stage.policycompass.eu", "services-prod.policycompass.eu", "localhost" ] with open('/etc/policycompass/secret_key') as f: SECRET_KEY = f.read().strip()
ab32e80f7d5bb92c969a0f0926a120325fad438b
peekaboo.py
peekaboo.py
import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:][0]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
Use the list item, not the list.
Use the list item, not the list. Otherwise you get urls with `[u'1234']` in.
Python
mit
ghickman/peekaboo
import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)

Use the list item, not the list. Otherwise you get urls with `[u'1234']` in.
import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:][0]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
<commit_before>import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
<commit_msg>Use the list item, not the list. Otherwise you get urls with `[u'1234']` in.<commit_after>
import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:][0]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)

Use the list item, not the list. Otherwise you get urls with `[u'1234']` in.import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:][0]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
<commit_before>import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
<commit_msg>Use the list item, not the list. Otherwise you get urls with `[u'1234']` in.<commit_after>import json
import os
import sys
import time

import requests
import pync

try:
    cache = []
    headers = {'Authorization': 'bearer {0}'.format(os.environ['GITHUB_TOKEN'])}
    since = None
    url = 'https://api.github.com/notifications'
    while True:
        params = {'since': since} if since else {}
        r = requests.get(url, headers=headers, params=params)
        if not r.ok:
            raise Exception('GitHub returned {0}'.format(r.status_code))
        since = time.strptime(r.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
        notifications = json.loads(r.content)
        if not notifications:
            cache = []
        for notification in notifications:
            if not notification['id'] in cache:
                latest_comment_id = notification['subject']['latest_comment_url'].split('/')[-1:][0]
                issue_url = notification['subject']['url'].replace('api.', '').replace('repos/', '')
                open_url = '{0}#{1}'.format(issue_url, latest_comment_id)
                kwargs = {
                    'title': notification['repository']['full_name'],
                    'open': open_url,
                    'group': os.getpid(),
                }
                s = notification['subject']['url'].split('/')[-2:]
                pync.Notifier.notify('{0} #{1}'.format(s[0].title(), s[1]), **kwargs)
                cache.append(notification['id'])
        time.sleep(5)
except KeyboardInterrupt:
    sys.exit(0)
956cfc16cee4d4069fdf21f980b881c3fa1864d6
office365/sharepoint/group.py
office365/sharepoint/group.py
from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))
from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

    @property
    def resource_path(self):
        orig_path = ClientObject.resource_path.fget(self)
        if self.is_property_available("Id") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetById", self.properties["Id"]))
        if self.is_property_available("LoginName") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetByName", self.properties["LoginName"]))
        return orig_path
Fix resource_path of Group resource
Fix resource_path of Group resource
Python
mit
vgrem/SharePointOnline-REST-Python-Client,vgrem/Office365-REST-Python-Client
from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

Fix resource_path of Group resource
from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

    @property
    def resource_path(self):
        orig_path = ClientObject.resource_path.fget(self)
        if self.is_property_available("Id") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetById", self.properties["Id"]))
        if self.is_property_available("LoginName") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetByName", self.properties["LoginName"]))
        return orig_path
<commit_before>from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))
<commit_msg>Fix resource_path of Group resource<commit_after>
from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

    @property
    def resource_path(self):
        orig_path = ClientObject.resource_path.fget(self)
        if self.is_property_available("Id") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetById", self.properties["Id"]))
        if self.is_property_available("LoginName") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetByName", self.properties["LoginName"]))
        return orig_path
from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

Fix resource_path of Group resourcefrom office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

    @property
    def resource_path(self):
        orig_path = ClientObject.resource_path.fget(self)
        if self.is_property_available("Id") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetById", self.properties["Id"]))
        if self.is_property_available("LoginName") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetByName", self.properties["LoginName"]))
        return orig_path
<commit_before>from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))
<commit_msg>Fix resource_path of Group resource<commit_after>from office365.runtime.client_object import ClientObject
from office365.runtime.odata.odata_path_parser import ODataPathParser
from office365.sharepoint.principal import Principal
from office365.runtime.resource_path_entry import ResourcePathEntry


class Group(Principal):
    """Represents a collection of users in a SharePoint site. A group is a type of SP.Principal."""

    @property
    def users(self):
        from office365.sharepoint.user_collection import UserCollection
        """Gets a collection of user objects that represents all of the users in the group."""
        if self.is_property_available('Users'):
            return self.properties['Users']
        else:
            return UserCollection(self.context, ResourcePathEntry(self.context, self.resource_path, "Users"))

    @property
    def resource_path(self):
        orig_path = ClientObject.resource_path.fget(self)
        if self.is_property_available("Id") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetById", self.properties["Id"]))
        if self.is_property_available("LoginName") and orig_path is None:
            return ResourcePathEntry(self.context,
                                     self.context.web.site_groups.resource_path,
                                     ODataPathParser.from_method("GetByName", self.properties["LoginName"]))
        return orig_path
63c43ae652ac0b18aba5f56f70271f95e43815d6
django/echonest/utils.py
django/echonest/utils.py
from django.conf import settings
from purl import Template
import requests

from .models import SimilarResponse

API_URL = Template("http://developer.echonest.com/api/v4/artist/similar"
                   "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY)


def get_similar_from_api(name):
    url = API_URL.expand({'name': name})
    r = requests.get(url)
    r.raise_for_status()
    return SimilarResponse.objects.create(name=name, response=r.json())


def get_similar_from_db(name):
    return SimilarResponse.objects.get(normalized_name=name.upper())


def get_similar(name):
    try:
        response = get_similar_from_db(name)
    except SimilarResponse.DoesNotExist:
        response = get_similar_from_api(name)
    return response.artist_names
from django.conf import settings
from purl import Template
import requests

from .models import SimilarResponse

API_URL = Template("http://developer.echonest.com/api/v4/artist/similar"
                   "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY)


def get_similar_from_api(name):
    url = API_URL.expand({'name': name})
    r = requests.get(str(url))
    r.raise_for_status()
    return SimilarResponse.objects.create(name=name, response=r.json())


def get_similar_from_db(name):
    return SimilarResponse.objects.get(normalized_name=name.upper())


def get_similar(name):
    try:
        response = get_similar_from_db(name)
    except SimilarResponse.DoesNotExist:
        response = get_similar_from_api(name)
    return response.artist_names
Fix bug in The Echo Nest API call
Fix bug in The Echo Nest API call
Python
bsd-3-clause
FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja
from django.conf import settings
from purl import Template
import requests

from .models import SimilarResponse

API_URL = Template("http://developer.echonest.com/api/v4/artist/similar"
                   "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY)


def get_similar_from_api(name):
    url = API_URL.expand({'name': name})
    r = requests.get(url)
    r.raise_for_status()
    return SimilarResponse.objects.create(name=name, response=r.json())


def get_similar_from_db(name):
    return SimilarResponse.objects.get(normalized_name=name.upper())


def get_similar(name):
    try:
        response = get_similar_from_db(name)
    except SimilarResponse.DoesNotExist:
        response = get_similar_from_api(name)
    return response.artist_names

Fix bug in The Echo Nest API call
from django.conf import settings
from purl import Template
import requests

from .models import SimilarResponse

API_URL = Template("http://developer.echonest.com/api/v4/artist/similar"
                   "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY)


def get_similar_from_api(name):
    url = API_URL.expand({'name': name})
    r = requests.get(str(url))
    r.raise_for_status()
    return SimilarResponse.objects.create(name=name, response=r.json())


def get_similar_from_db(name):
    return SimilarResponse.objects.get(normalized_name=name.upper())


def get_similar(name):
    try:
        response = get_similar_from_db(name)
    except SimilarResponse.DoesNotExist:
        response = get_similar_from_api(name)
    return response.artist_names
<commit_before>from django.conf import settings
from purl import Template
import requests

from .models import SimilarResponse

API_URL = Template("http://developer.echonest.com/api/v4/artist/similar"
                   "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY)


def get_similar_from_api(name):
    url = API_URL.expand({'name': name})
    r = requests.get(url)
    r.raise_for_status()
    return SimilarResponse.objects.create(name=name, response=r.json())


def get_similar_from_db(name):
    return SimilarResponse.objects.get(normalized_name=name.upper())


def get_similar(name):
    try:
        response = get_similar_from_db(name)
    except SimilarResponse.DoesNotExist:
        response = get_similar_from_api(name)
    return response.artist_names
<commit_msg>Fix bug in The Echo Nest API call<commit_after>
from django.conf import settings from purl import Template import requests from .models import SimilarResponse API_URL = Template("http://developer.echonest.com/api/v4/artist/similar" "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY) def get_similar_from_api(name): url = API_URL.expand({'name': name}) r = requests.get(str(url)) r.raise_for_status() return SimilarResponse.objects.create(name=name, response=r.json()) def get_similar_from_db(name): return SimilarResponse.objects.get(normalized_name=name.upper()) def get_similar(name): try: response = get_similar_from_db(name) except SimilarResponse.DoesNotExist: response = get_similar_from_api(name) return response.artist_names
from django.conf import settings from purl import Template import requests from .models import SimilarResponse API_URL = Template("http://developer.echonest.com/api/v4/artist/similar" "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY) def get_similar_from_api(name): url = API_URL.expand({'name': name}) r = requests.get(url) r.raise_for_status() return SimilarResponse.objects.create(name=name, response=r.json()) def get_similar_from_db(name): return SimilarResponse.objects.get(normalized_name=name.upper()) def get_similar(name): try: response = get_similar_from_db(name) except SimilarResponse.DoesNotExist: response = get_similar_from_api(name) return response.artist_names Fix bug in The Echo Nest API callfrom django.conf import settings from purl import Template import requests from .models import SimilarResponse API_URL = Template("http://developer.echonest.com/api/v4/artist/similar" "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY) def get_similar_from_api(name): url = API_URL.expand({'name': name}) r = requests.get(str(url)) r.raise_for_status() return SimilarResponse.objects.create(name=name, response=r.json()) def get_similar_from_db(name): return SimilarResponse.objects.get(normalized_name=name.upper()) def get_similar(name): try: response = get_similar_from_db(name) except SimilarResponse.DoesNotExist: response = get_similar_from_api(name) return response.artist_names
<commit_before>from django.conf import settings from purl import Template import requests from .models import SimilarResponse API_URL = Template("http://developer.echonest.com/api/v4/artist/similar" "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY) def get_similar_from_api(name): url = API_URL.expand({'name': name}) r = requests.get(url) r.raise_for_status() return SimilarResponse.objects.create(name=name, response=r.json()) def get_similar_from_db(name): return SimilarResponse.objects.get(normalized_name=name.upper()) def get_similar(name): try: response = get_similar_from_db(name) except SimilarResponse.DoesNotExist: response = get_similar_from_api(name) return response.artist_names <commit_msg>Fix bug in The Echo Nest API call<commit_after>from django.conf import settings from purl import Template import requests from .models import SimilarResponse API_URL = Template("http://developer.echonest.com/api/v4/artist/similar" "?api_key=%s&results=100&name={name}" % settings.ECHONEST_API_KEY) def get_similar_from_api(name): url = API_URL.expand({'name': name}) r = requests.get(str(url)) r.raise_for_status() return SimilarResponse.objects.create(name=name, response=r.json()) def get_similar_from_db(name): return SimilarResponse.objects.get(normalized_name=name.upper()) def get_similar(name): try: response = get_similar_from_db(name) except SimilarResponse.DoesNotExist: response = get_similar_from_api(name) return response.artist_names
3471dd3196c134f7aee35aa38370c93915be2197
example/__init__.py
example/__init__.py
import webapp2 class IntroHandler(webapp2.RequestHandler): def get(self): pass app = webapp2.WSGIApplication([ ('/', IntroHandler), ])
#
# Copyright 2012 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Contained within this module are several working examples showing basic
usage and complex context-based chaining.

The examples demonstrate basic task execution, and also the basics of
creating more complicated processing pipelines.
"""

import logging

import webapp2


def example_function(*args, **kwargs):
    logging.info('example_function executed with args: %r, kwargs: %r',
                 args, kwargs)

    return args


class AsyncIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Create and insert a single furious task."""
        from furious.async import Async

        # Instantiate an Async object.
        async_task = Async(
            target=example_function, args=[1], kwargs={'some': 'value'})

        # Insert the task to run the Async object, note that it may begin
        # executing immediately or with some delay.
        async_task.start()

        logging.info('Async job kicked off.')

        self.response.out.write('Successfully inserted Async job.')


class ContextIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Batch insert a group of furious tasks."""
        from furious.async import Async
        from furious import context

        with context.new() as ctx:
            # "Manually" instantiate and add an Async object.
            async_task = Async(
                target=example_function, kwargs={'first': 'async'})
            ctx.add(async_task)
            logging.info('Added manual job to context.')

            for i in xrange(5):
                ctx.add(target=example_function, args=[i])
                logging.info('Added job %d to context.', i)

        logging.info('Async jobs for context batch inserted.')

        self.response.out.write('Successfully inserted a group of Async jobs.')


app = webapp2.WSGIApplication([
    ('/', AsyncIntroHandler),
    ('/context', ContextIntroHandler),
])
Add basic draft of example code.
Add basic draft of example code.
Python
apache-2.0
robertkluin/furious,Workiva/furious,rosshendrickson-wf/furious,andreleblanc-wf/furious,rosshendrickson-wf/furious,andreleblanc-wf/furious,beaulyddon-wf/furious,Workiva/furious,mattsanders-wf/furious,beaulyddon-wf/furious,mattsanders-wf/furious
import webapp2 class IntroHandler(webapp2.RequestHandler): def get(self): pass app = webapp2.WSGIApplication([ ('/', IntroHandler), ]) Add basic draft of example code.
#
# Copyright 2012 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Contained within this module are several working examples showing basic
usage and complex context-based chaining.

The examples demonstrate basic task execution, and also the basics of
creating more complicated processing pipelines.
"""

import logging

import webapp2


def example_function(*args, **kwargs):
    logging.info('example_function executed with args: %r, kwargs: %r',
                 args, kwargs)

    return args


class AsyncIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Create and insert a single furious task."""
        from furious.async import Async

        # Instantiate an Async object.
        async_task = Async(
            target=example_function, args=[1], kwargs={'some': 'value'})

        # Insert the task to run the Async object, note that it may begin
        # executing immediately or with some delay.
        async_task.start()

        logging.info('Async job kicked off.')

        self.response.out.write('Successfully inserted Async job.')


class ContextIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Batch insert a group of furious tasks."""
        from furious.async import Async
        from furious import context

        with context.new() as ctx:
            # "Manually" instantiate and add an Async object.
            async_task = Async(
                target=example_function, kwargs={'first': 'async'})
            ctx.add(async_task)
            logging.info('Added manual job to context.')

            for i in xrange(5):
                ctx.add(target=example_function, args=[i])
                logging.info('Added job %d to context.', i)

        logging.info('Async jobs for context batch inserted.')

        self.response.out.write('Successfully inserted a group of Async jobs.')


app = webapp2.WSGIApplication([
    ('/', AsyncIntroHandler),
    ('/context', ContextIntroHandler),
])
<commit_before> import webapp2 class IntroHandler(webapp2.RequestHandler): def get(self): pass app = webapp2.WSGIApplication([ ('/', IntroHandler), ]) <commit_msg>Add basic draft of example code.<commit_after>
#
# Copyright 2012 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Contained within this module are several working examples showing basic
usage and complex context-based chaining.

The examples demonstrate basic task execution, and also the basics of
creating more complicated processing pipelines.
"""

import logging

import webapp2


def example_function(*args, **kwargs):
    logging.info('example_function executed with args: %r, kwargs: %r',
                 args, kwargs)

    return args


class AsyncIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Create and insert a single furious task."""
        from furious.async import Async

        # Instantiate an Async object.
        async_task = Async(
            target=example_function, args=[1], kwargs={'some': 'value'})

        # Insert the task to run the Async object, note that it may begin
        # executing immediately or with some delay.
        async_task.start()

        logging.info('Async job kicked off.')

        self.response.out.write('Successfully inserted Async job.')


class ContextIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Batch insert a group of furious tasks."""
        from furious.async import Async
        from furious import context

        with context.new() as ctx:
            # "Manually" instantiate and add an Async object.
            async_task = Async(
                target=example_function, kwargs={'first': 'async'})
            ctx.add(async_task)
            logging.info('Added manual job to context.')

            for i in xrange(5):
                ctx.add(target=example_function, args=[i])
                logging.info('Added job %d to context.', i)

        logging.info('Async jobs for context batch inserted.')

        self.response.out.write('Successfully inserted a group of Async jobs.')


app = webapp2.WSGIApplication([
    ('/', AsyncIntroHandler),
    ('/context', ContextIntroHandler),
])
import webapp2


class IntroHandler(webapp2.RequestHandler):
    def get(self):
        pass


app = webapp2.WSGIApplication([
    ('/', IntroHandler),
])
Add basic draft of example code.#
# Copyright 2012 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Contained within this module are several working examples showing basic
usage and complex context-based chaining.

The examples demonstrate basic task execution, and also the basics of
creating more complicated processing pipelines.
"""

import logging

import webapp2


def example_function(*args, **kwargs):
    logging.info('example_function executed with args: %r, kwargs: %r',
                 args, kwargs)

    return args


class AsyncIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Create and insert a single furious task."""
        from furious.async import Async

        # Instantiate an Async object.
        async_task = Async(
            target=example_function, args=[1], kwargs={'some': 'value'})

        # Insert the task to run the Async object, note that it may begin
        # executing immediately or with some delay.
        async_task.start()

        logging.info('Async job kicked off.')

        self.response.out.write('Successfully inserted Async job.')


class ContextIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Batch insert a group of furious tasks."""
        from furious.async import Async
        from furious import context

        with context.new() as ctx:
            # "Manually" instantiate and add an Async object.
            async_task = Async(
                target=example_function, kwargs={'first': 'async'})
            ctx.add(async_task)
            logging.info('Added manual job to context.')

            for i in xrange(5):
                ctx.add(target=example_function, args=[i])
                logging.info('Added job %d to context.', i)

        logging.info('Async jobs for context batch inserted.')

        self.response.out.write('Successfully inserted a group of Async jobs.')


app = webapp2.WSGIApplication([
    ('/', AsyncIntroHandler),
    ('/context', ContextIntroHandler),
])
<commit_before>
import webapp2


class IntroHandler(webapp2.RequestHandler):
    def get(self):
        pass


app = webapp2.WSGIApplication([
    ('/', IntroHandler),
])
<commit_msg>Add basic draft of example code.<commit_after>#
# Copyright 2012 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Contained within this module are several working examples showing basic
usage and complex context-based chaining.

The examples demonstrate basic task execution, and also the basics of
creating more complicated processing pipelines.
"""

import logging

import webapp2


def example_function(*args, **kwargs):
    logging.info('example_function executed with args: %r, kwargs: %r',
                 args, kwargs)

    return args


class AsyncIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Create and insert a single furious task."""
        from furious.async import Async

        # Instantiate an Async object.
        async_task = Async(
            target=example_function, args=[1], kwargs={'some': 'value'})

        # Insert the task to run the Async object, note that it may begin
        # executing immediately or with some delay.
        async_task.start()

        logging.info('Async job kicked off.')

        self.response.out.write('Successfully inserted Async job.')


class ContextIntroHandler(webapp2.RequestHandler):
    def get(self):
        """Batch insert a group of furious tasks."""
        from furious.async import Async
        from furious import context

        with context.new() as ctx:
            # "Manually" instantiate and add an Async object.
            async_task = Async(
                target=example_function, kwargs={'first': 'async'})
            ctx.add(async_task)
            logging.info('Added manual job to context.')

            for i in xrange(5):
                ctx.add(target=example_function, args=[i])
                logging.info('Added job %d to context.', i)

        logging.info('Async jobs for context batch inserted.')

        self.response.out.write('Successfully inserted a group of Async jobs.')


app = webapp2.WSGIApplication([
    ('/', AsyncIntroHandler),
    ('/context', ContextIntroHandler),
])
0e8cc65317e7c443b4319b028395951185ef80de
filebrowser/urls.py
filebrowser/urls.py
from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', redirect_to, {'url': '/admin/business/photo/?_popup=1'}, name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), )
from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', 'filebrowser.views.browse', name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), )
Revert URL redirect (didn't work)
Revert URL redirect (didn't work)
Python
bsd-3-clause
django-wodnas/django-filebrowser-no-grappelli,sandow-digital/django-filebrowser-no-grappelli-sandow,sandow-digital/django-filebrowser-no-grappelli-sandow,django-wodnas/django-filebrowser-no-grappelli,sandow-digital/django-filebrowser-no-grappelli-sandow,django-wodnas/django-filebrowser-no-grappelli
from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', redirect_to, {'url': '/admin/business/photo/?_popup=1'}, name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), ) Revert URL redirect (didn't work)
from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', 'filebrowser.views.browse', name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), )
<commit_before>from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', redirect_to, {'url': '/admin/business/photo/?_popup=1'}, name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), ) <commit_msg>Revert URL redirect (didn't work)<commit_after>
from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', 'filebrowser.views.browse', name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), )
from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', redirect_to, {'url': '/admin/business/photo/?_popup=1'}, name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), ) Revert URL redirect (didn't work)from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', 'filebrowser.views.browse', name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), )
<commit_before>from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', redirect_to, {'url': '/admin/business/photo/?_popup=1'}, name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), ) <commit_msg>Revert URL redirect (didn't work)<commit_after>from django.conf.urls.defaults import * from django.views.generic.simple import redirect_to urlpatterns = patterns('', # filebrowser urls url(r'^browse/$', 'filebrowser.views.browse', name="fb_browse"), url(r'^mkdir/', 'filebrowser.views.mkdir', name="fb_mkdir"), url(r'^upload/', 'filebrowser.views.upload', name="fb_upload"), url(r'^rename/$', 'filebrowser.views.rename', name="fb_rename"), url(r'^delete/$', 'filebrowser.views.delete', name="fb_delete"), url(r'^versions/$', 'filebrowser.views.versions', name="fb_versions"), url(r'^check_file/$', 'filebrowser.views._check_file', name="fb_check"), url(r'^upload_file/$', 'filebrowser.views._upload_file', name="fb_do_upload"), )
32191547567e9ce0cdec954d0079fb18f85b38ce
requests_oauthlib/oauth2_auth.py
requests_oauthlib/oauth2_auth.py
from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from .utils import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r
from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from oauthlib.oauth2 import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r
Use newly exposed oauth2.is_secure_transport instead of duplicate.
Use newly exposed oauth2.is_secure_transport instead of duplicate.
Python
isc
requests/requests-oauthlib,lucidbard/requests-oauthlib,dongguangming/requests-oauthlib,elafarge/requests-oauthlib,abhi931375/requests-oauthlib,gras100/asks-oauthlib,sigmavirus24/requests-oauthlib,jayvdb/requests-oauthlib,jsfan/requests-oauthlib,jayvdb/requests-oauthlib,singingwolfboy/requests-oauthlib
from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from .utils import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r Use newly exposed oauth2.is_secure_transport instead of duplicate.
from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from oauthlib.oauth2 import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r
<commit_before>from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from .utils import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r <commit_msg>Use newly exposed oauth2.is_secure_transport instead of duplicate.<commit_after>
from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from oauthlib.oauth2 import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r
from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from .utils import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r Use newly exposed oauth2.is_secure_transport instead of duplicate.from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from oauthlib.oauth2 import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r
<commit_before>from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from .utils import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r <commit_msg>Use newly exposed oauth2.is_secure_transport instead of duplicate.<commit_after>from __future__ import unicode_literals from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError from oauthlib.oauth2 import is_secure_transport class OAuth2(object): """Adds proof of authorization (OAuth2 token) to the request.""" def __init__(self, client_id=None, client=None, token=None): """Construct a new OAuth 2 authorization object. :param client_id: Client id obtained during registration :param client: :class:`oauthlib.oauth2.Client` to be used. Default is WebApplicationClient which is useful for any hosted application but not mobile or desktop. :param token: Token dictionary, must include access_token and token_type. """ self._client = client or WebApplicationClient(client_id, token=token) if token: for k, v in token.items(): setattr(self._client, k, v) def __call__(self, r): """Append an OAuth 2 token to the request. Note that currently HTTPS is required for all requests. There may be a token type that allows for plain HTTP in the future and then this should be updated to allow plain HTTP on a white list basis. """ if not is_secure_transport(r.url): raise InsecureTransportError() r.url, r.headers, r.body = self._client.add_token(r.url, http_method=r.method, body=r.body, headers=r.headers) return r
cedbfda6e9c040c6924eae2eff0e9b4e9f3f93f0
api/core/helpers.py
api/core/helpers.py
import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token # TODO: Convert this to an email template if welcome: subject = "Welcome to Voter Engagement" else: subject = "Greetings from Voter Engagement" body = f"Click here to log in: {url}" email = EmailMessage( subject=subject, body=body, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) log.debug(f"Sending email: {prettify(email.__dict__)}") count = email.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}"
import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token message = EmailMessage( subject=None, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) if welcome: message.template_id = 'voter-engagement-welcome' else: message.template_id = 'voter-engagement-login' message.merge_global_data = { 'FIRST_NAME': user.first_name, 'LAST_NAME': user.last_name, 'LOGIN_URL': url, # TODO: Set site URL dynamically 'SITE_URL': 'https://alpha-vote.citizenlabs.org/', # TODO: Implement unsubscribe functionality 'UNSUBSCRIBE_URL': 'https://citizenlabs.org/contact/', 'ABOUT_URL': 'https://citizenlabs.org/about/', } log.debug(f"Sending email: {prettify(message.__dict__)}") count = message.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}"
Use Mandrill templates to send emails
Use Mandrill templates to send emails
Python
mit
citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement
import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token # TODO: Convert this to an email template if welcome: subject = "Welcome to Voter Engagement" else: subject = "Greetings from Voter Engagement" body = f"Click here to log in: {url}" email = EmailMessage( subject=subject, body=body, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) log.debug(f"Sending email: {prettify(email.__dict__)}") count = email.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}" Use Mandrill templates to send emails
import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token message = EmailMessage( subject=None, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) if welcome: message.template_id = 'voter-engagement-welcome' else: message.template_id = 'voter-engagement-login' message.merge_global_data = { 'FIRST_NAME': user.first_name, 'LAST_NAME': user.last_name, 'LOGIN_URL': url, # TODO: Set site URL dynamically 'SITE_URL': 'https://alpha-vote.citizenlabs.org/', # TODO: Implement unsubscribe functionality 'UNSUBSCRIBE_URL': 'https://citizenlabs.org/contact/', 'ABOUT_URL': 'https://citizenlabs.org/about/', } log.debug(f"Sending email: {prettify(message.__dict__)}") count = message.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}"
<commit_before>import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token # TODO: Convert this to an email template if welcome: subject = "Welcome to Voter Engagement" else: subject = "Greetings from Voter Engagement" body = f"Click here to log in: {url}" email = EmailMessage( subject=subject, body=body, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) log.debug(f"Sending email: {prettify(email.__dict__)}") count = email.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}" <commit_msg>Use Mandrill templates to send emails<commit_after>
import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token message = EmailMessage( subject=None, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) if welcome: message.template_id = 'voter-engagement-welcome' else: message.template_id = 'voter-engagement-login' message.merge_global_data = { 'FIRST_NAME': user.first_name, 'LAST_NAME': user.last_name, 'LOGIN_URL': url, # TODO: Set site URL dynamically 'SITE_URL': 'https://alpha-vote.citizenlabs.org/', # TODO: Implement unsubscribe functionality 'UNSUBSCRIBE_URL': 'https://citizenlabs.org/contact/', 'ABOUT_URL': 'https://citizenlabs.org/about/', } log.debug(f"Sending email: {prettify(message.__dict__)}") count = message.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}"
import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token # TODO: Convert this to an email template if welcome: subject = "Welcome to Voter Engagement" else: subject = "Greetings from Voter Engagement" body = f"Click here to log in: {url}" email = EmailMessage( subject=subject, body=body, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) log.debug(f"Sending email: {prettify(email.__dict__)}") count = email.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}" Use Mandrill templates to send emailsimport pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token message = EmailMessage( subject=None, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) if welcome: message.template_id = 'voter-engagement-welcome' else: message.template_id = 'voter-engagement-login' message.merge_global_data = { 'FIRST_NAME': user.first_name, 'LAST_NAME': user.last_name, 'LOGIN_URL': url, # TODO: Set site URL dynamically 'SITE_URL': 'https://alpha-vote.citizenlabs.org/', # TODO: Implement unsubscribe functionality 'UNSUBSCRIBE_URL': 'https://citizenlabs.org/contact/', 'ABOUT_URL': 'https://citizenlabs.org/about/', } log.debug(f"Sending email: {prettify(message.__dict__)}") count = message.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}"
<commit_before>import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token # TODO: Convert this to an email template if welcome: subject = "Welcome to Voter Engagement" else: subject = "Greetings from Voter Engagement" body = f"Click here to log in: {url}" email = EmailMessage( subject=subject, body=body, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) log.debug(f"Sending email: {prettify(email.__dict__)}") count = email.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}" <commit_msg>Use Mandrill templates to send emails<commit_after>import pprint from django.core.mail import EmailMessage import log from rest_framework.reverse import reverse from sesame.utils import get_query_string def send_login_email(user, request, *, welcome): assert user.email, f"User has no email: {user}" base = reverse('redirector', args=["login"], request=request) token = get_query_string(user) url = base + token message = EmailMessage( subject=None, from_email="Citizen Labs <noreply@citizenlabs.org>", to=[user.email], ) if welcome: message.template_id = 'voter-engagement-welcome' else: message.template_id = 'voter-engagement-login' message.merge_global_data = { 'FIRST_NAME': user.first_name, 'LAST_NAME': user.last_name, 'LOGIN_URL': url, # TODO: Set site URL dynamically 'SITE_URL': 'https://alpha-vote.citizenlabs.org/', # TODO: Implement unsubscribe functionality 'UNSUBSCRIBE_URL': 'https://citizenlabs.org/contact/', 'ABOUT_URL': 'https://citizenlabs.org/about/', } log.debug(f"Sending email: {prettify(message.__dict__)}") count = message.send(fail_silently=False) return count def prettify(data: dict): return "{\n " + pprint.pformat(data, indent=2)[1:-1] + ",\n}"
bf4af59b4a9d0637d3743b6b6ff0eaef18dbb902
flask_restplus/namespace.py
flask_restplus/namespace.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper
# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc is not None: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper
Hide resource if doc is False
Hide resource if doc is False
Python
mit
leiserfg/flask-restplus,luminusnetworks/flask-restplus,awiddersheim/flask-restplus,awiddersheim/flask-restplus,fixedd/flask-restplus,luminusnetworks/flask-restplus,fixedd/flask-restplus,leiserfg/flask-restplus
# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper Hide resource if doc is False
# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc is not None: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper <commit_msg>Hide resource if doc is False<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc is not None: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper
# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper Hide resource if doc is False# -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc is not None: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper <commit_msg>Hide resource if doc is False<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals class ApiNamespace(object): def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs): self.api = api self.name = name self.path = path or ('/' + name) self.description = description self.resources = [] self.models = [] def add_resource(self, resource, *urls, **kwargs): self.resources.append((resource, urls, kwargs)) self.api.add_resource(resource, *urls, namespace=self, **kwargs) def route(self, *urls, **kwargs): def wrapper(cls): doc = kwargs.pop('doc', None) if doc is not None: self.api._handle_api_doc(cls, doc) self.add_resource(cls, *[self.path + url for url in urls], **kwargs) return cls return wrapper
69ac16b1501f9affa008c68d4b8197b320ae00b8
cleanup.py
cleanup.py
#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = float(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): # subprocess.call(['docker', 'rm', image_name]) print('docker rm ' + image_name) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name)
#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = int(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): subprocess.check_call(['docker', 'rm', image_name]) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name)
Delete images instead of printing
Delete images instead of printing
Python
mit
dreipol/cleanup-deis-images,dreipol/cleanup-deis-images
#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = float(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): # subprocess.call(['docker', 'rm', image_name]) print('docker rm ' + image_name) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name) Delete images instead of printing
#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = int(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): subprocess.check_call(['docker', 'rm', image_name]) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name)
<commit_before>#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = float(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): # subprocess.call(['docker', 'rm', image_name]) print('docker rm ' + image_name) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name) <commit_msg>Delete images instead of printing<commit_after>
#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = int(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): subprocess.check_call(['docker', 'rm', image_name]) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name)
#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = float(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): # subprocess.call(['docker', 'rm', image_name]) print('docker rm ' + image_name) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name) Delete images instead of printing#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = int(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): subprocess.check_call(['docker', 'rm', image_name]) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name)
<commit_before>#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = float(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): # subprocess.call(['docker', 'rm', image_name]) print('docker rm ' + image_name) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name) <commit_msg>Delete images instead of printing<commit_after>#!/usr/bin/env python from collections import defaultdict import subprocess import os KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4) def find_obsolete_images(images): for image_name, versions in images.items(): if len(versions) > KEEP_LAST_VERSIONS: obsolete_versions = sorted(versions, reverse=True)[4:] for version in obsolete_versions: yield '{}:{}'.format(image_name, version) def parse_images(lines): images = defaultdict(list) for line in lines: try: image_name, version = line.split(' ') version_num = int(version.replace('v', '')) images[image_name].append(version_num) except ValueError: pass return images def remove_image(image_name): subprocess.check_call(['docker', 'rm', image_name]) def all_images(): output = subprocess \ .check_output(['./docker_image_versions.sh'], shell=True) \ .decode('utf-8') lines = output.split('\n') return parse_images(lines) if __name__ == '__main__': images = all_images() for image_name in find_obsolete_images(images): remove_image(image_name)
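The commit in the record above swaps the dry-run print for a real `subprocess.check_call`, but two latent issues survive in both versions of `cleanup.py`: `os.environ.get('KEEP_LAST_VERSIONS', 4)` returns a string whenever the variable is actually set, so the later `len(versions) > KEEP_LAST_VERSIONS` comparison is unreliable or a `TypeError`, and deleting an image takes `docker rmi`, not `docker rm` (which removes containers). The hard-coded `[4:]` slice in `find_obsolete_images` likewise ignores the configurable constant. A minimal sketch of the hardened pieces; the int coercion and the `rmi` spelling are my reading of the intended behavior, not part of the recorded commit:

```python
import os
import subprocess

# Coerce to int: os.environ.get() yields a str when the variable is set,
# which would break the numeric comparison against len(versions).
KEEP_LAST_VERSIONS = int(os.environ.get('KEEP_LAST_VERSIONS', 4))


def remove_image(image_name):
    # 'docker rmi' deletes images; 'docker rm' deletes containers.
    subprocess.check_call(['docker', 'rmi', image_name])
```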
c733126501690c7168d757ab9f14a4877e8544da
resolwe/flow/managers/workload_connectors/__init__.py
resolwe/flow/managers/workload_connectors/__init__.py
""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: """
""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: .. automodule:: resolwe.flow.managers.workload_connectors.kubernetes :members: """
Add Kubernetes workload connector to documentation
Add Kubernetes workload connector to documentation
Python
apache-2.0
genialis/resolwe,genialis/resolwe
""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: """ Add Kubernetes workload connector to documentation
""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: .. automodule:: resolwe.flow.managers.workload_connectors.kubernetes :members: """
<commit_before>""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: """ <commit_msg>Add Kubernetes workload connector to documentation<commit_after>
""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: .. automodule:: resolwe.flow.managers.workload_connectors.kubernetes :members: """
""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: """ Add Kubernetes workload connector to documentation""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: .. automodule:: resolwe.flow.managers.workload_connectors.kubernetes :members: """
<commit_before>""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: """ <commit_msg>Add Kubernetes workload connector to documentation<commit_after>""".. Ignore pydocstyle D400. =================== Workload Connectors =================== The workload management system connectors are used as glue between the Resolwe Manager and various concrete workload management systems that might be used by it. Since the only functional requirement is job submission, they can be simple and nearly contextless. .. automodule:: resolwe.flow.managers.workload_connectors.base :members: .. automodule:: resolwe.flow.managers.workload_connectors.local :members: .. automodule:: resolwe.flow.managers.workload_connectors.celery :members: .. automodule:: resolwe.flow.managers.workload_connectors.slurm :members: .. automodule:: resolwe.flow.managers.workload_connectors.kubernetes :members: """
b6fd1c849402d42bd00467406ae8c5dff42f2d03
tests/test_style.py
tests/test_style.py
import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed')
import logging import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): logger = logging.getLogger('flake8') logger.setLevel(logging.ERROR) flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed')
Decrease noise from code-style test
Decrease noise from code-style test
Python
mit
ministryofjustice/django-moj-irat
import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed') Decrease noise from code-style test
import logging import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): logger = logging.getLogger('flake8') logger.setLevel(logging.ERROR) flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed')
<commit_before>import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed') <commit_msg>Decrease noise from code-style test<commit_after>
import logging import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): logger = logging.getLogger('flake8') logger.setLevel(logging.ERROR) flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed')
import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed') Decrease noise from code-style testimport logging import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): logger = logging.getLogger('flake8') logger.setLevel(logging.ERROR) flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed')
<commit_before>import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed') <commit_msg>Decrease noise from code-style test<commit_after>import logging import pkg_resources import unittest class CodeStyleTestCase(unittest.TestCase): def test_code_style(self): logger = logging.getLogger('flake8') logger.setLevel(logging.ERROR) flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8') try: flake8([]) except SystemExit as e: if e.code != 0: self.fail('Code style checks failed')
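Setting the `flake8` logger to `ERROR`, as the commit above does, is one way to quiet the run; flake8 3.x also exposes a small public API that sidesteps the console entry point (and its logging) entirely. A sketch under that assumption; `flake8.api.legacy` is not used anywhere in the record itself:

```python
import unittest

from flake8.api import legacy as flake8


class CodeStyleTestCase(unittest.TestCase):
    def test_code_style(self):
        # get_style_guide() honours the same config files as the CLI;
        # check_files() returns a report instead of calling sys.exit().
        style_guide = flake8.get_style_guide()
        report = style_guide.check_files(['.'])
        self.assertEqual(report.total_errors, 0, 'Code style checks failed')
```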
dffe54088c9cb66a29a7aa63de269730de8a67d7
tests/test_utils.py
tests/test_utils.py
import pytest from .test_views import twenty_name_fixtures from django.test.client import RequestFactory from name.views import ( normalize_query, resolve_q, resolve_type, filter_names ) # FIXME: This is used to silence PEP8 warnings twenty_name_fixtures = twenty_name_fixtures @pytest.mark.xfail def test_get_unique_user_id(): assert False @pytest.mark.django_db def test_filter_names_with_empty_query(twenty_name_fixtures): names = filter_names('', None) assert names.count() == twenty_name_fixtures.count() # TODO: Create a new fixture where we know how many of # each type is known @pytest.mark.django_db def test_filter_names_with_query(twenty_name_fixtures): names = filter_names('1', None) assert names.count() == 11 @pytest.mark.parametrize('query,expected', [ ('Personal', 1), ('Personal,Building,Organization', 3), ('Personal,Building,Organization,Software,Event', 5), ('Personal,Building,Organization,', 3), ('Personal, Location, Organiztion', 1), ('Personal, Building, Organiztion,', 1), ('Personal Building Organiztion', 0), ('Unknown,Types', 0), ('Unknown,Types', 0) ]) def test_resolve_type(query, expected): rf = RequestFactory() request = rf.get('/', {'q_type': query}) types = resolve_type(request) assert len(types) == expected def test_resolve_q_returns_value(): rf = RequestFactory() q = resolve_q(rf.get('/', {'q': 'value'})) assert q == 'value' def test_resolve_q_returns_empty_string(): rf = RequestFactory() q = resolve_q(rf.get('/')) assert '' == q @pytest.mark.xfail def test_get_query(): assert False @pytest.mark.parametrize('query,expected', [ ('one two three four', 4), ('extra spaces Here', 3), ('\try escape sequence', 3), ('"Pun. cua," tion! !!', 3), ]) def test_normalize_query(query, expected): normalized = normalize_query(query) assert len(normalized) == expected @pytest.mark.xfail def test_calc_total_by_month(): assert False @pytest.mark.xfail def test_prepare_graph_date_range(): assert False
Add tests for some helper functions in views.py.
Add tests for some helper functions in views.py.
Python
bsd-3-clause
damonkelley/django-name,unt-libraries/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name
Add tests for some helper functions in views.py.
import pytest from .test_views import twenty_name_fixtures from django.test.client import RequestFactory from name.views import ( normalize_query, resolve_q, resolve_type, filter_names ) # FIXME: This is used to silence PEP8 warnings twenty_name_fixtures = twenty_name_fixtures @pytest.mark.xfail def test_get_unique_user_id(): assert False @pytest.mark.django_db def test_filter_names_with_empty_query(twenty_name_fixtures): names = filter_names('', None) assert names.count() == twenty_name_fixtures.count() # TODO: Create a new fixture where we know how many of # each type is known @pytest.mark.django_db def test_filter_names_with_query(twenty_name_fixtures): names = filter_names('1', None) assert names.count() == 11 @pytest.mark.parametrize('query,expected', [ ('Personal', 1), ('Personal,Building,Organization', 3), ('Personal,Building,Organization,Software,Event', 5), ('Personal,Building,Organization,', 3), ('Personal, Location, Organiztion', 1), ('Personal, Building, Organiztion,', 1), ('Personal Building Organiztion', 0), ('Unknown,Types', 0), ('Unknown,Types', 0) ]) def test_resolve_type(query, expected): rf = RequestFactory() request = rf.get('/', {'q_type': query}) types = resolve_type(request) assert len(types) == expected def test_resolve_q_returns_value(): rf = RequestFactory() q = resolve_q(rf.get('/', {'q': 'value'})) assert q == 'value' def test_resolve_q_returns_empty_string(): rf = RequestFactory() q = resolve_q(rf.get('/')) assert '' == q @pytest.mark.xfail def test_get_query(): assert False @pytest.mark.parametrize('query,expected', [ ('one two three four', 4), ('extra spaces Here', 3), ('\try escape sequence', 3), ('"Pun. cua," tion! !!', 3), ]) def test_normalize_query(query, expected): normalized = normalize_query(query) assert len(normalized) == expected @pytest.mark.xfail def test_calc_total_by_month(): assert False @pytest.mark.xfail def test_prepare_graph_date_range(): assert False
<commit_before><commit_msg>Add tests for some helper functions in views.py.<commit_after>
import pytest from .test_views import twenty_name_fixtures from django.test.client import RequestFactory from name.views import ( normalize_query, resolve_q, resolve_type, filter_names ) # FIXME: This is used to silence PEP8 warnings twenty_name_fixtures = twenty_name_fixtures @pytest.mark.xfail def test_get_unique_user_id(): assert False @pytest.mark.django_db def test_filter_names_with_empty_query(twenty_name_fixtures): names = filter_names('', None) assert names.count() == twenty_name_fixtures.count() # TODO: Create a new fixture where we know how many of # each type is known @pytest.mark.django_db def test_filter_names_with_query(twenty_name_fixtures): names = filter_names('1', None) assert names.count() == 11 @pytest.mark.parametrize('query,expected', [ ('Personal', 1), ('Personal,Building,Organization', 3), ('Personal,Building,Organization,Software,Event', 5), ('Personal,Building,Organization,', 3), ('Personal, Location, Organiztion', 1), ('Personal, Building, Organiztion,', 1), ('Personal Building Organiztion', 0), ('Unknown,Types', 0), ('Unknown,Types', 0) ]) def test_resolve_type(query, expected): rf = RequestFactory() request = rf.get('/', {'q_type': query}) types = resolve_type(request) assert len(types) == expected def test_resolve_q_returns_value(): rf = RequestFactory() q = resolve_q(rf.get('/', {'q': 'value'})) assert q == 'value' def test_resolve_q_returns_empty_string(): rf = RequestFactory() q = resolve_q(rf.get('/')) assert '' == q @pytest.mark.xfail def test_get_query(): assert False @pytest.mark.parametrize('query,expected', [ ('one two three four', 4), ('extra spaces Here', 3), ('\try escape sequence', 3), ('"Pun. cua," tion! !!', 3), ]) def test_normalize_query(query, expected): normalized = normalize_query(query) assert len(normalized) == expected @pytest.mark.xfail def test_calc_total_by_month(): assert False @pytest.mark.xfail def test_prepare_graph_date_range(): assert False
Add tests for some helper functions in views.py.import pytest from .test_views import twenty_name_fixtures from django.test.client import RequestFactory from name.views import ( normalize_query, resolve_q, resolve_type, filter_names ) # FIXME: This is used to silence PEP8 warnings twenty_name_fixtures = twenty_name_fixtures @pytest.mark.xfail def test_get_unique_user_id(): assert False @pytest.mark.django_db def test_filter_names_with_empty_query(twenty_name_fixtures): names = filter_names('', None) assert names.count() == twenty_name_fixtures.count() # TODO: Create a new fixture where we know how many of # each type is known @pytest.mark.django_db def test_filter_names_with_query(twenty_name_fixtures): names = filter_names('1', None) assert names.count() == 11 @pytest.mark.parametrize('query,expected', [ ('Personal', 1), ('Personal,Building,Organization', 3), ('Personal,Building,Organization,Software,Event', 5), ('Personal,Building,Organization,', 3), ('Personal, Location, Organiztion', 1), ('Personal, Building, Organiztion,', 1), ('Personal Building Organiztion', 0), ('Unknown,Types', 0), ('Unknown,Types', 0) ]) def test_resolve_type(query, expected): rf = RequestFactory() request = rf.get('/', {'q_type': query}) types = resolve_type(request) assert len(types) == expected def test_resolve_q_returns_value(): rf = RequestFactory() q = resolve_q(rf.get('/', {'q': 'value'})) assert q == 'value' def test_resolve_q_returns_empty_string(): rf = RequestFactory() q = resolve_q(rf.get('/')) assert '' == q @pytest.mark.xfail def test_get_query(): assert False @pytest.mark.parametrize('query,expected', [ ('one two three four', 4), ('extra spaces Here', 3), ('\try escape sequence', 3), ('"Pun. cua," tion! !!', 3), ]) def test_normalize_query(query, expected): normalized = normalize_query(query) assert len(normalized) == expected @pytest.mark.xfail def test_calc_total_by_month(): assert False @pytest.mark.xfail def test_prepare_graph_date_range(): assert False
<commit_before><commit_msg>Add tests for some helper functions in views.py.<commit_after>import pytest from .test_views import twenty_name_fixtures from django.test.client import RequestFactory from name.views import ( normalize_query, resolve_q, resolve_type, filter_names ) # FIXME: This is used to silence PEP8 warnings twenty_name_fixtures = twenty_name_fixtures @pytest.mark.xfail def test_get_unique_user_id(): assert False @pytest.mark.django_db def test_filter_names_with_empty_query(twenty_name_fixtures): names = filter_names('', None) assert names.count() == twenty_name_fixtures.count() # TODO: Create a new fixture where we know how many of # each type is known @pytest.mark.django_db def test_filter_names_with_query(twenty_name_fixtures): names = filter_names('1', None) assert names.count() == 11 @pytest.mark.parametrize('query,expected', [ ('Personal', 1), ('Personal,Building,Organization', 3), ('Personal,Building,Organization,Software,Event', 5), ('Personal,Building,Organization,', 3), ('Personal, Location, Organiztion', 1), ('Personal, Building, Organiztion,', 1), ('Personal Building Organiztion', 0), ('Unknown,Types', 0), ('Unknown,Types', 0) ]) def test_resolve_type(query, expected): rf = RequestFactory() request = rf.get('/', {'q_type': query}) types = resolve_type(request) assert len(types) == expected def test_resolve_q_returns_value(): rf = RequestFactory() q = resolve_q(rf.get('/', {'q': 'value'})) assert q == 'value' def test_resolve_q_returns_empty_string(): rf = RequestFactory() q = resolve_q(rf.get('/')) assert '' == q @pytest.mark.xfail def test_get_query(): assert False @pytest.mark.parametrize('query,expected', [ ('one two three four', 4), ('extra spaces Here', 3), ('\try escape sequence', 3), ('"Pun. cua," tion! !!', 3), ]) def test_normalize_query(query, expected): normalized = normalize_query(query) assert len(normalized) == expected @pytest.mark.xfail def test_calc_total_by_month(): assert False @pytest.mark.xfail def test_prepare_graph_date_range(): assert False
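The new test module above stubs out unwritten tests with `@pytest.mark.xfail` over `assert False`, which keeps the suite green while reporting them as expected failures. Two slightly more explicit placeholder styles, sketched with illustrative reasons that are not in the record: `skip` states the intent directly, and `xfail(strict=True)` turns an unexpected pass into an error, so the placeholder cannot linger once the code exists.

```python
import pytest


@pytest.mark.skip(reason='helper not yet implemented')
def test_get_unique_user_id():
    ...


# strict=True turns an unexpected pass into a test failure, so the
# placeholder is forcibly revisited once the helper starts working.
@pytest.mark.xfail(strict=True, reason='helper not yet implemented')
def test_calc_total_by_month():
    assert False
```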
6518911dad0d22e878d618f9a9a1472de7a7ee1e
config/fuzz_pox_mesh.py
config/fuzz_pox_mesh.py
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops) #control_flow = Interactive(simulation_config, input_logger=InputLogger())
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.mock_discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops, mock_link_discovery=True) #control_flow = Interactive(simulation_config, input_logger=InputLogger())
Use the mock discovery module
Use the mock discovery module
Python
apache-2.0
jmiserez/sts,ucb-sts/sts,ucb-sts/sts,jmiserez/sts
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops) #control_flow = Interactive(simulation_config, input_logger=InputLogger()) Use the mock discovery module
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.mock_discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops, mock_link_discovery=True) #control_flow = Interactive(simulation_config, input_logger=InputLogger())
<commit_before>from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops) #control_flow = Interactive(simulation_config, input_logger=InputLogger()) <commit_msg>Use the mock discovery module<commit_after>
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.mock_discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops, mock_link_discovery=True) #control_flow = Interactive(simulation_config, input_logger=InputLogger())
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops) #control_flow = Interactive(simulation_config, input_logger=InputLogger()) Use the mock discovery modulefrom experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.mock_discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops, mock_link_discovery=True) #control_flow = Interactive(simulation_config, input_logger=InputLogger())
<commit_before>from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops) #control_flow = Interactive(simulation_config, input_logger=InputLogger()) <commit_msg>Use the mock discovery module<commit_after>from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer, Interactive from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose sts.syncproto.pox_syncer ''' '''openflow.mock_discovery forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=__address__ --port=__port__''') controllers = [ControllerConfig(command_line, cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=2" dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=80, halt_on_violation=False, input_logger=InputLogger(), invariant_check=InvariantChecker.check_loops, mock_link_discovery=True) #control_flow = Interactive(simulation_config, input_logger=InputLogger())
97a799965e74f6add2eefab38a4e1a69699092df
students/forms.py
students/forms.py
from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean()
from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): # TODO: try catch KeyError here to avoid empty form error form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean()
Add todo note in ExclusiveRegistrationForm.
Add todo note in ExclusiveRegistrationForm.
Python
mit
muhummadPatel/raspied,muhummadPatel/raspied,muhummadPatel/raspied
from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean() Add todo note in ExclusiveRegistrationForm.
from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): # TODO: try catch KeyError here to avoid empty form error form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean()
<commit_before>from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean() <commit_msg>Add todo note in ExclusiveRegistrationForm.<commit_after>
from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): # TODO: try catch KeyError here to avoid empty form error form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean()
from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean() Add todo note in ExclusiveRegistrationForm.from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): # TODO: try catch KeyError here to avoid empty form error form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean()
<commit_before>from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean() <commit_msg>Add todo note in ExclusiveRegistrationForm.<commit_after>from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils.translation import ugettext_lazy as _ from registration.forms import RegistrationForm from .models import WhitelistedUsername User = get_user_model() class ExclusiveRegistrationForm(RegistrationForm): def clean(self): # TODO: try catch KeyError here to avoid empty form error form_username = self.cleaned_data['username'] try: # If this runs without raising an exception, then the username is in # our database of whitelisted usernames. WhitelistedUsername.objects.get(username=form_username.lower()) except ObjectDoesNotExist: err = ValidationError(_('Unrecognised student number. Are you a CS1 student at UCT?s'), code='invalid') self.add_error(User.USERNAME_FIELD, err) super(ExclusiveRegistrationForm, self).clean()
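The TODO added by the commit above flags a real gap: on an empty form, `self.cleaned_data['username']` raises `KeyError` because the field never reached `cleaned_data`. One way to close it, sketched against the same models and imports as the record; the `.filter(...).exists()` lookup, the `super().clean()` ordering, and the corrected `UCT?` message are my choices, not the recorded code:

```python
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _

from registration.forms import RegistrationForm

from .models import WhitelistedUsername

User = get_user_model()


class ExclusiveRegistrationForm(RegistrationForm):
    def clean(self):
        cleaned_data = super(ExclusiveRegistrationForm, self).clean()
        # .get() avoids the KeyError from the TODO: 'username' is missing
        # from cleaned_data whenever its own field validation failed.
        form_username = cleaned_data.get('username')
        if form_username is not None and not WhitelistedUsername.objects.filter(
                username=form_username.lower()).exists():
            err = ValidationError(
                _('Unrecognised student number. Are you a CS1 student at UCT?'),
                code='invalid')
            self.add_error(User.USERNAME_FIELD, err)
        return cleaned_data
```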
7a324d85ef76604c919c2c7e2f38fbda17b3d01c
docs/examples/led_travis.py
docs/examples/led_travis.py
from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' sleep(delay) # Sleep an hour before hitting travis again red = LED(12) green = LED(16) red.source = negated(green.values) green.source = build_passed() pause()
from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' red = LED(12) green = LED(16) green.source = build_passed('RPi-Distro/python-gpiozero') green.source_delay = 60 * 5 # check every 5 minutes red.source = negated(green.values) pause()
Use source_delay instead of sleep, and tidy up a bit
Use source_delay instead of sleep, and tidy up a bit
Python
bsd-3-clause
RPi-Distro/python-gpiozero,waveform80/gpio-zero,MrHarcombe/python-gpiozero
from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' sleep(delay) # Sleep an hour before hitting travis again red = LED(12) green = LED(16) red.source = negated(green.values) green.source = build_passed() pause() Use source_delay instead of sleep, and tidy up a bit
from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' red = LED(12) green = LED(16) green.source = build_passed('RPi-Distro/python-gpiozero') green.source_delay = 60 * 5 # check every 5 minutes red.source = negated(green.values) pause()
<commit_before>from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' sleep(delay) # Sleep an hour before hitting travis again red = LED(12) green = LED(16) red.source = negated(green.values) green.source = build_passed() pause() <commit_msg>Use source_delay instead of sleep, and tidy up a bit<commit_after>
from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' red = LED(12) green = LED(16) green.source = build_passed('RPi-Distro/python-gpiozero') green.source_delay = 60 * 5 # check every 5 minutes red.source = negated(green.values) pause()
from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' sleep(delay) # Sleep an hour before hitting travis again red = LED(12) green = LED(16) red.source = negated(green.values) green.source = build_passed() pause() Use source_delay instead of sleep, and tidy up a bitfrom travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' red = LED(12) green = LED(16) green.source = build_passed('RPi-Distro/python-gpiozero') green.source_delay = 60 * 5 # check every 5 minutes red.source = negated(green.values) pause()
<commit_before>from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo='RPi-Distro/python-gpiozero', delay=3600): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' sleep(delay) # Sleep an hour before hitting travis again red = LED(12) green = LED(16) red.source = negated(green.values) green.source = build_passed() pause() <commit_msg>Use source_delay instead of sleep, and tidy up a bit<commit_after>from travispy import TravisPy from gpiozero import LED from gpiozero.tools import negated from time import sleep from signal import pause def build_passed(repo): t = TravisPy() r = t.repo(repo) while True: yield r.last_build_state == 'passed' red = LED(12) green = LED(16) green.source = build_passed('RPi-Distro/python-gpiozero') green.source_delay = 60 * 5 # check every 5 minutes red.source = negated(green.values) pause()
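After the rewrite above, `from time import sleep` is dead weight: the pacing moved out of `build_passed` and onto `source_delay` of the consuming device. The same script with the unused import dropped; everything else is the record's own code, relying on gpiozero's documented `source` / `source_delay` / `values` plumbing:

```python
from signal import pause

from gpiozero import LED
from gpiozero.tools import negated
from travispy import TravisPy


def build_passed(repo):
    t = TravisPy()
    r = t.repo(repo)
    while True:
        # source_delay on the LED paces this generator, so the old
        # explicit sleep() call (and its import) is no longer needed.
        yield r.last_build_state == 'passed'


red = LED(12)
green = LED(16)

green.source = build_passed('RPi-Distro/python-gpiozero')
green.source_delay = 60 * 5  # check every 5 minutes
red.source = negated(green.values)

pause()
```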
7011a38826fcde520e4bf07f7089d9d1b75ee8f9
spec/openpassword/fudge_wrapper.py
spec/openpassword/fudge_wrapper.py
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) return super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
Fix bug on fudge wrapper
Fix bug on fudge wrapper
Python
mit
openpassword/blimey,openpassword/blimey
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException Fix bug on fudge wrapper
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) return super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
<commit_before>import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException <commit_msg>Fix bug on fudge wrapper<commit_after>
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) return super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException Fix bug on fudge wrapperimport fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) return super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
<commit_before>import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException <commit_msg>Fix bug on fudge wrapper<commit_after>import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) return super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
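The one-line fix in this record forwards the return value of super().provides(call_name); dropping it makes any chained call on the fake silently evaluate to None. A standalone sketch of the fluent-API rule being restored (toy classes, not fudge's real interface):

class Fake:
    def provides(self, name):
        self._name = name
        return self  # fluent API: enables fake.provides(...).calls(...)

class CheckedFake(Fake):
    def provides(self, name):
        # The override must pass the return value through, or every
        # call chain built on the wrapper breaks with a None result.
        return super().provides(name)

assert CheckedFake().provides('load') is not None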
e979aab8ffdd5a2e86be7dd8fcacb5f10953a994
src/SeleniumLibrary/utils/types.py
src/SeleniumLibrary/utils/types.py
# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. import sys if sys.version_info[0] == 2: def is_string(item): return isinstance(item, (str, unicode)) else: from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE'
# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE'
Remove is_string because it exist in RF 2.9
Remove is_string because it exist in RF 2.9
Python
apache-2.0
robotframework/SeleniumLibrary,emanlove/robotframework-selenium2library,emanlove/robotframework-selenium2library,rtomac/robotframework-selenium2library,emanlove/robotframework-selenium2library,robotframework/SeleniumLibrary,robotframework/SeleniumLibrary,rtomac/robotframework-selenium2library,rtomac/robotframework-selenium2library
# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. import sys if sys.version_info[0] == 2: def is_string(item): return isinstance(item, (str, unicode)) else: from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE' Remove is_string because it exist in RF 2.9
# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE'
<commit_before># Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. import sys if sys.version_info[0] == 2: def is_string(item): return isinstance(item, (str, unicode)) else: from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE' <commit_msg>Remove is_string because it exist in RF 2.9<commit_after>
# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE'
# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. import sys if sys.version_info[0] == 2: def is_string(item): return isinstance(item, (str, unicode)) else: from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE' Remove is_string because it exist in RF 2.9# Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE'
<commit_before># Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. import sys if sys.version_info[0] == 2: def is_string(item): return isinstance(item, (str, unicode)) else: from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE' <commit_msg>Remove is_string because it exist in RF 2.9<commit_after># Copyright 2008-2011 Nokia Networks # Copyright 2011-2016 Ryan Tomac, Ed Manlove and contributors # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Originally based on Robot Framework 3.0.2 robot.utils.robottypes # Can be removed when library minimum required Robot Framework version is # greater than 3.0.2. Then Robot Framework is_truthy should also support # string NONE as Python False. from robot.utils import is_string def is_truthy(item): if is_string(item): return item.upper() not in ('FALSE', 'NO', '', 'NONE') return bool(item) def is_falsy(item): return not is_truthy(item) def is_noney(item): return item is None or is_string(item) and item.upper() == 'NONE'
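Beyond dropping the Python 2 shim, the helper kept in this record is notable because it treats several strings as false that plain bool() would treat as true. A self-contained sketch of that truthiness rule (isinstance(item, str) stands in for Robot Framework's is_string):

def is_truthy(item):
    if isinstance(item, str):
        return item.upper() not in ('FALSE', 'NO', '', 'NONE')
    return bool(item)

assert is_truthy('yes') is True
assert is_truthy('None') is False   # plain bool('None') would be True
assert is_truthy(0) is False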
4e243ade9b96c5ea6e68c27593fb578c52c85f1a
huffman.py
huffman.py
class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight
class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight def setRoot(self, root): self.root = root def setLeft(self, left): self.left = left def setRight(self, right): self.right = right def addCode(self, code): self.code = code + self.code
Add functions about setting the parent & children nodes and codes.
Add functions about setting the parent & children nodes and codes.
Python
mit
hane1818/Algorithm_HW3_huffman_code
class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight Add functions about setting the parent & children nodes and codes.
class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight def setRoot(self, root): self.root = root def setLeft(self, left): self.left = left def setRight(self, right): self.right = right def addCode(self, code): self.code = code + self.code
<commit_before>class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight <commit_msg>Add functions about setting the parent & children nodes and codes.<commit_after>
class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight def setRoot(self, root): self.root = root def setLeft(self, left): self.left = left def setRight(self, right): self.right = right def addCode(self, code): self.code = code + self.code
class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight Add functions about setting the parent & children nodes and codes.class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight def setRoot(self, root): self.root = root def setLeft(self, left): self.left = left def setRight(self, right): self.right = right def addCode(self, code): self.code = code + self.code
<commit_before>class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight <commit_msg>Add functions about setting the parent & children nodes and codes.<commit_after>class Node: def __init__(self): self.name = '' self.weight = 0 self.code = '' def initSet(self, name, weight): self.name = name self.weight = weight def setRoot(self, root): self.root = root def setLeft(self, left): self.left = left def setRight(self, right): self.right = right def addCode(self, code): self.code = code + self.code
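The setters added in this record are exactly the pieces needed to merge the two lightest nodes while building a Huffman tree. A hypothetical usage sketch, assuming the Node class above is in scope (the parent's name scheme is illustrative):

left, right = Node(), Node()
left.initSet('a', 5)
right.initSet('b', 9)

parent = Node()
parent.initSet(left.name + right.name, left.weight + right.weight)
parent.setLeft(left)
parent.setRight(right)
left.setRoot(parent)
right.setRoot(parent)

# Walking back up the tree prepends one bit per level.
left.addCode('0')
right.addCode('1')
assert (left.code, right.code) == ('0', '1')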
6a5936a3d69c8af1b5878b824dec17c94fd1da95
masters/master.tryserver.chromium.gpu/master_site_config.py
masters/master.tryserver.chromium.gpu/master_site_config.py
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8020 slave_port = 8120 master_port_alt = 8220 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr'
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8021 slave_port = 8121 master_port_alt = 8221 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr'
Fix colliding ports for tryserver.chromium.gpu
Fix colliding ports for tryserver.chromium.gpu BUG=353434 Review URL: https://codereview.chromium.org/203743004 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@257737 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8020 slave_port = 8120 master_port_alt = 8220 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' Fix colliding ports for tryserver.chromium.gpu BUG=353434 Review URL: https://codereview.chromium.org/203743004 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@257737 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8021 slave_port = 8121 master_port_alt = 8221 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr'
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8020 slave_port = 8120 master_port_alt = 8220 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' <commit_msg>Fix colliding ports for tryserver.chromium.gpu BUG=353434 Review URL: https://codereview.chromium.org/203743004 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@257737 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8021 slave_port = 8121 master_port_alt = 8221 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr'
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8020 slave_port = 8120 master_port_alt = 8220 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' Fix colliding ports for tryserver.chromium.gpu BUG=353434 Review URL: https://codereview.chromium.org/203743004 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@257737 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8021 slave_port = 8121 master_port_alt = 8221 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr'
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8020 slave_port = 8120 master_port_alt = 8220 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' <commit_msg>Fix colliding ports for tryserver.chromium.gpu BUG=353434 Review URL: https://codereview.chromium.org/203743004 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@257737 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class GpuTryServer(Master.Master4): project_name = 'Chromium GPU Try Server' master_port = 8021 slave_port = 8121 master_port_alt = 8221 reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr'
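The fix bumps all three ports by one so this master stops clashing with a neighbour on the same host. A small sketch of the kind of collision check that would have caught it early (master names and the second port set are illustrative):

masters = {
    'GpuTryServer': (8021, 8121, 8221),
    'OtherTryServer': (8020, 8120, 8220),
}

owner = {}
for name, ports in masters.items():
    for port in ports:
        assert port not in owner, f'{name} reuses port {port} of {owner[port]}'
        owner[port] = name
print('no colliding ports')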
c984183b7ea92dcd7106151bed43065504358dd0
tests/__init__.py
tests/__init__.py
import sys import os CRAFTAI_MODULE_SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path = [CRAFTAI_MODULE_SRC_DIR] + sys.path
Make sure the local version of the craft ai module is used in tests
Make sure the local version of the craft ai module is used in tests
Python
bsd-3-clause
craft-ai/craft-ai-client-python,craft-ai/craft-ai-client-python
Make sure the local version of the craft ai module is used in tests
import sys import os CRAFTAI_MODULE_SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path = [CRAFTAI_MODULE_SRC_DIR] + sys.path
<commit_before><commit_msg>Make sure the local version of the craft ai module is used in tests<commit_after>
import sys import os CRAFTAI_MODULE_SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path = [CRAFTAI_MODULE_SRC_DIR] + sys.path
Make sure the local version of the craft ai module is used in testsimport sys import os CRAFTAI_MODULE_SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path = [CRAFTAI_MODULE_SRC_DIR] + sys.path
<commit_before><commit_msg>Make sure the local version of the craft ai module is used in tests<commit_after>import sys import os CRAFTAI_MODULE_SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path = [CRAFTAI_MODULE_SRC_DIR] + sys.path
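The test package above prepends the checkout root to sys.path so the in-repo module wins over any pip-installed copy. A common variant of that bootstrap for a tests/__init__.py or conftest, with a guard against duplicate entries (the one-level-up layout is an assumption, not necessarily craft ai's exact tree):

import os
import sys

REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if REPO_ROOT not in sys.path:
    # insert(0, ...) shadows an installed copy; append(...) would not.
    sys.path.insert(0, REPO_ROOT)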
d72e13f22eab7bf61b56b1d5fa33b006c5f13299
tests/cli_test.py
tests/cli_test.py
from pork.cli import CLI from mock import Mock class TestCLI: def it_has_a_data_attribute(self): assert CLI().data is not None
from pork.cli import CLI, main from mock import Mock, patch from StringIO import StringIO patch.TEST_PREFIX = 'it' @patch('pork.cli.Data') class TestCLI: def it_has_a_data_attribute(self, Data): assert CLI().data is not None def it_sets_keys(self, Data): cli = CLI() cli.start(['foo', 'bar']) Data.return_value.set.assert_called_with('foo', 'bar') def it_gets_keys(self, Data): cli = CLI() cli.start(['foo']) Data.return_value.get.assert_called_with('foo') def it_deletes_keys(self, Data): cli = CLI() cli.start(['-d', 'foo']) Data.return_value.delete.assert_called_with('foo') @patch('sys.stdout', new_callable=StringIO) def it_prints_help_when_there_is_no_data(self, stdout, Data): Data.return_value.is_empty.return_value = True cli = CLI() cli.start([]) assert "Usage:" in stdout.getvalue() @patch('sys.stdout', new_callable=StringIO) def it_lists_all_keys_when_there_is_data(self, stdout, Data): Data.return_value.is_empty.return_value = False Data.return_value.list.return_value = { 'foo': 'bar', 'asdf': 'fdsa'} cli = CLI() cli.start([]) assert ' foo: bar\nasdf: fdsa\n' == stdout.getvalue() @patch('pork.cli.CLI') class TestMain: def it_calls_start_on_a_new_CLI_object(self, CLI): main() CLI.return_value.start.assert_called()
Add test coverage for pork.cli.
Add test coverage for pork.cli.
Python
mit
jimmycuadra/pork,jimmycuadra/pork
from pork.cli import CLI from mock import Mock class TestCLI: def it_has_a_data_attribute(self): assert CLI().data is not None Add test coverage for pork.cli.
from pork.cli import CLI, main from mock import Mock, patch from StringIO import StringIO patch.TEST_PREFIX = 'it' @patch('pork.cli.Data') class TestCLI: def it_has_a_data_attribute(self, Data): assert CLI().data is not None def it_sets_keys(self, Data): cli = CLI() cli.start(['foo', 'bar']) Data.return_value.set.assert_called_with('foo', 'bar') def it_gets_keys(self, Data): cli = CLI() cli.start(['foo']) Data.return_value.get.assert_called_with('foo') def it_deletes_keys(self, Data): cli = CLI() cli.start(['-d', 'foo']) Data.return_value.delete.assert_called_with('foo') @patch('sys.stdout', new_callable=StringIO) def it_prints_help_when_there_is_no_data(self, stdout, Data): Data.return_value.is_empty.return_value = True cli = CLI() cli.start([]) assert "Usage:" in stdout.getvalue() @patch('sys.stdout', new_callable=StringIO) def it_lists_all_keys_when_there_is_data(self, stdout, Data): Data.return_value.is_empty.return_value = False Data.return_value.list.return_value = { 'foo': 'bar', 'asdf': 'fdsa'} cli = CLI() cli.start([]) assert ' foo: bar\nasdf: fdsa\n' == stdout.getvalue() @patch('pork.cli.CLI') class TestMain: def it_calls_start_on_a_new_CLI_object(self, CLI): main() CLI.return_value.start.assert_called()
<commit_before>from pork.cli import CLI from mock import Mock class TestCLI: def it_has_a_data_attribute(self): assert CLI().data is not None <commit_msg>Add test coverage for pork.cli.<commit_after>
from pork.cli import CLI, main from mock import Mock, patch from StringIO import StringIO patch.TEST_PREFIX = 'it' @patch('pork.cli.Data') class TestCLI: def it_has_a_data_attribute(self, Data): assert CLI().data is not None def it_sets_keys(self, Data): cli = CLI() cli.start(['foo', 'bar']) Data.return_value.set.assert_called_with('foo', 'bar') def it_gets_keys(self, Data): cli = CLI() cli.start(['foo']) Data.return_value.get.assert_called_with('foo') def it_deletes_keys(self, Data): cli = CLI() cli.start(['-d', 'foo']) Data.return_value.delete.assert_called_with('foo') @patch('sys.stdout', new_callable=StringIO) def it_prints_help_when_there_is_no_data(self, stdout, Data): Data.return_value.is_empty.return_value = True cli = CLI() cli.start([]) assert "Usage:" in stdout.getvalue() @patch('sys.stdout', new_callable=StringIO) def it_lists_all_keys_when_there_is_data(self, stdout, Data): Data.return_value.is_empty.return_value = False Data.return_value.list.return_value = { 'foo': 'bar', 'asdf': 'fdsa'} cli = CLI() cli.start([]) assert ' foo: bar\nasdf: fdsa\n' == stdout.getvalue() @patch('pork.cli.CLI') class TestMain: def it_calls_start_on_a_new_CLI_object(self, CLI): main() CLI.return_value.start.assert_called()
from pork.cli import CLI from mock import Mock class TestCLI: def it_has_a_data_attribute(self): assert CLI().data is not None Add test coverage for pork.cli.from pork.cli import CLI, main from mock import Mock, patch from StringIO import StringIO patch.TEST_PREFIX = 'it' @patch('pork.cli.Data') class TestCLI: def it_has_a_data_attribute(self, Data): assert CLI().data is not None def it_sets_keys(self, Data): cli = CLI() cli.start(['foo', 'bar']) Data.return_value.set.assert_called_with('foo', 'bar') def it_gets_keys(self, Data): cli = CLI() cli.start(['foo']) Data.return_value.get.assert_called_with('foo') def it_deletes_keys(self, Data): cli = CLI() cli.start(['-d', 'foo']) Data.return_value.delete.assert_called_with('foo') @patch('sys.stdout', new_callable=StringIO) def it_prints_help_when_there_is_no_data(self, stdout, Data): Data.return_value.is_empty.return_value = True cli = CLI() cli.start([]) assert "Usage:" in stdout.getvalue() @patch('sys.stdout', new_callable=StringIO) def it_lists_all_keys_when_there_is_data(self, stdout, Data): Data.return_value.is_empty.return_value = False Data.return_value.list.return_value = { 'foo': 'bar', 'asdf': 'fdsa'} cli = CLI() cli.start([]) assert ' foo: bar\nasdf: fdsa\n' == stdout.getvalue() @patch('pork.cli.CLI') class TestMain: def it_calls_start_on_a_new_CLI_object(self, CLI): main() CLI.return_value.start.assert_called()
<commit_before>from pork.cli import CLI from mock import Mock class TestCLI: def it_has_a_data_attribute(self): assert CLI().data is not None <commit_msg>Add test coverage for pork.cli.<commit_after>from pork.cli import CLI, main from mock import Mock, patch from StringIO import StringIO patch.TEST_PREFIX = 'it' @patch('pork.cli.Data') class TestCLI: def it_has_a_data_attribute(self, Data): assert CLI().data is not None def it_sets_keys(self, Data): cli = CLI() cli.start(['foo', 'bar']) Data.return_value.set.assert_called_with('foo', 'bar') def it_gets_keys(self, Data): cli = CLI() cli.start(['foo']) Data.return_value.get.assert_called_with('foo') def it_deletes_keys(self, Data): cli = CLI() cli.start(['-d', 'foo']) Data.return_value.delete.assert_called_with('foo') @patch('sys.stdout', new_callable=StringIO) def it_prints_help_when_there_is_no_data(self, stdout, Data): Data.return_value.is_empty.return_value = True cli = CLI() cli.start([]) assert "Usage:" in stdout.getvalue() @patch('sys.stdout', new_callable=StringIO) def it_lists_all_keys_when_there_is_data(self, stdout, Data): Data.return_value.is_empty.return_value = False Data.return_value.list.return_value = { 'foo': 'bar', 'asdf': 'fdsa'} cli = CLI() cli.start([]) assert ' foo: bar\nasdf: fdsa\n' == stdout.getvalue() @patch('pork.cli.CLI') class TestMain: def it_calls_start_on_a_new_CLI_object(self, CLI): main() CLI.return_value.start.assert_called()
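Two mechanics from this record are worth spelling out: patch.TEST_PREFIX tells the mock library which methods of a decorated class get patched (here those named it_*), and each @patch injects its replacement as an extra argument. A minimal self-contained sketch of the injection rule, using the stdlib's unittest.mock rather than the mock backport used above:

from unittest import mock

class Greeter:
    def greet(self):
        return 'hello'

@mock.patch.object(Greeter, 'greet', return_value='hi')
def check(greet_mock):
    # The patched attribute arrives as the rightmost extra argument.
    assert Greeter().greet() == 'hi'
    greet_mock.assert_called_once_with()

check()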
09931cfbba746daf5127b6113187042341e3be3d
tests/conftest.py
tests/conftest.py
import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", }
import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
Add more pytest fixtures (access_key, secret_key, account_id, timestamp)
Add more pytest fixtures (access_key, secret_key, account_id, timestamp)
Python
unlicense
GriceTurrble/python-amazon-mws,Bobspadger/python-amazon-mws
import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", } Add more pytest fixtures (access_key, secret_key, account_id, timestamp)
import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
<commit_before>import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", } <commit_msg>Add more pytest fixtures (access_key, secret_key, account_id, timestamp)<commit_after>
import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", } Add more pytest fixtures (access_key, secret_key, account_id, timestamp)import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
<commit_before>import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", } <commit_msg>Add more pytest fixtures (access_key, secret_key, account_id, timestamp)<commit_after>import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
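The rewrite decomposes one fixture into four so tests can request the raw pieces and the composed dict independently; pytest resolves the fixture dependency graph automatically. A hypothetical test that would sit next to this conftest (run under pytest, not standalone):

def test_credentials_match_parts(credentials, access_key, account_id):
    assert credentials['access_key'] == access_key
    assert credentials['account_id'] == account_id
    assert set(credentials) == {'access_key', 'secret_key', 'account_id'}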
17f6d104810f53a3ceac4943f3b80def3917b356
textx/__init__.py
textx/__init__.py
# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.generators import get_output_filename, gen_file from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0"
# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0"
Remove click dependent import from the main module.
Remove click dependent import from the main module. This leads to import error when textX is installed without CLI support.
Python
mit
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.generators import get_output_filename, gen_file from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0" Remove click dependent import from the main module. This leads to import error when textX is installed without CLI support.
# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0"
<commit_before># flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.generators import get_output_filename, gen_file from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0" <commit_msg>Remove click dependent import from the main module. This leads to import error when textX is installed without CLI support.<commit_after>
# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0"
# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.generators import get_output_filename, gen_file from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0" Remove click dependent import from the main module. This leads to import error when textX is installed without CLI support.# flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0"
<commit_before># flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.generators import get_output_filename, gen_file from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0" <commit_msg>Remove click dependent import from the main module. This leads to import error when textX is installed without CLI support.<commit_after># flake8: noqa from textx.metamodel import metamodel_from_file, metamodel_from_str from textx.model import get_children_of_type, get_parent_of_type, \ get_model, get_metamodel, get_children, get_location, textx_isinstance from textx.exceptions import TextXError, TextXSyntaxError, \ TextXSemanticError, TextXRegistrationError from textx.registration import (LanguageDesc, GeneratorDesc, register_language, register_generator, language_descriptions, language_description, generator_descriptions, generator_description, clear_language_registrations, clear_generator_registrations, languages_for_file, language_for_file, metamodel_for_language, metamodel_for_file, metamodels_for_file, generator_for_language_target, generator, language) __version__ = "2.3.0.dev0"
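The commit drops the generators import, whose click dependency broke plain `import textx` when the package was installed without CLI support. A generic sketch of the optional-dependency pattern at play (names are illustrative, not textX's internals):

try:
    import click  # only the command-line entry points need this
except ImportError:  # package installed without its CLI extra
    click = None

def require_cli():
    if click is None:
        raise RuntimeError("CLI support missing; reinstall with the CLI extra")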
9619ecae61514bf1681425c503c38ccbe17f4b47
src/commoner/registration/admin.py
src/commoner/registration/admin.py
from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) admin.site.register(PartialRegistration, PartialRegistrationAdmin)
from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) fieldsets = ( (None, { 'fields':('last_name', 'first_name', 'email', 'complete', 'transaction_id', 'user'), 'description':'Adding a new registration will send a welcome email.'}, ), ) def save_model(self, request, obj, form, change): obj.save() if not change: # creating a new instance PartialRegistration.objects.send_welcome(obj) admin.site.register(PartialRegistration, PartialRegistrationAdmin)
Send welcome emails when registrations are added through the web interface.
Send welcome emails when registrations are added through the web interface.
Python
agpl-3.0
cc-archive/commoner,cc-archive/commoner
from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) admin.site.register(PartialRegistration, PartialRegistrationAdmin) Send welcome emails when registrations are added through the web interface.
from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) fieldsets = ( (None, { 'fields':('last_name', 'first_name', 'email', 'complete', 'transaction_id', 'user'), 'description':'Adding a new registration will send a welcome email.'}, ), ) def save_model(self, request, obj, form, change): obj.save() if not change: # creating a new instance PartialRegistration.objects.send_welcome(obj) admin.site.register(PartialRegistration, PartialRegistrationAdmin)
<commit_before>from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) admin.site.register(PartialRegistration, PartialRegistrationAdmin) <commit_msg>Send welcome emails when registrations are added through the web interface.<commit_after>
from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) fieldsets = ( (None, { 'fields':('last_name', 'first_name', 'email', 'complete', 'transaction_id', 'user'), 'description':'Adding a new registration will send a welcome email.'}, ), ) def save_model(self, request, obj, form, change): obj.save() if not change: # creating a new instance PartialRegistration.objects.send_welcome(obj) admin.site.register(PartialRegistration, PartialRegistrationAdmin)
from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) admin.site.register(PartialRegistration, PartialRegistrationAdmin) Send welcome emails when registrations are added through the web interface.from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) fieldsets = ( (None, { 'fields':('last_name', 'first_name', 'email', 'complete', 'transaction_id', 'user'), 'description':'Adding a new registration will send a welcome email.'}, ), ) def save_model(self, request, obj, form, change): obj.save() if not change: # creating a new instance PartialRegistration.objects.send_welcome(obj) admin.site.register(PartialRegistration, PartialRegistrationAdmin)
<commit_before>from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) admin.site.register(PartialRegistration, PartialRegistrationAdmin) <commit_msg>Send welcome emails when registrations are added through the web interface.<commit_after>from django.contrib import admin from commoner.registration.models import PartialRegistration class PartialRegistrationAdmin(admin.ModelAdmin): list_filter = ('complete',) fieldsets = ( (None, { 'fields':('last_name', 'first_name', 'email', 'complete', 'transaction_id', 'user'), 'description':'Adding a new registration will send a welcome email.'}, ), ) def save_model(self, request, obj, form, change): obj.save() if not change: # creating a new instance PartialRegistration.objects.send_welcome(obj) admin.site.register(PartialRegistration, PartialRegistrationAdmin)
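ModelAdmin.save_model runs for both creates and edits; the change flag is what lets the override above send the welcome email only on creation. A framework-free sketch of that hook contract (toy stand-ins, not Django's classes):

class AdminLike:
    def __init__(self, on_create):
        self.on_create = on_create

    def save_model(self, request, obj, form, change):
        obj.save()
        if not change:            # False means a brand-new object
            self.on_create(obj)

class Obj:
    def save(self):
        self.saved = True

welcomed = []
admin_like = AdminLike(on_create=welcomed.append)
obj = Obj()
admin_like.save_model(None, obj, None, change=False)
assert obj.saved and welcomed == [obj]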
9d7b7d70402894e07a00356ea8921c098b41ee24
soapbox/templatetags/soapbox.py
soapbox/templatetags/soapbox.py
from django import template from ..models import Message register = template.Library() @register.assignment_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url)
from django import template from ..models import Message register = template.Library() @register.simple_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url)
Address removal of assignment_tag in Django 2.0
Address removal of assignment_tag in Django 2.0
Python
bsd-3-clause
ubernostrum/django-soapbox,ubernostrum/django-soapbox
from django import template from ..models import Message register = template.Library() @register.assignment_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url) Address removal of assignment_tag in Django 2.0
from django import template from ..models import Message register = template.Library() @register.simple_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url)
<commit_before>from django import template from ..models import Message register = template.Library() @register.assignment_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url) <commit_msg>Address removal of assignment_tag in Django 2.0<commit_after>
from django import template from ..models import Message register = template.Library() @register.simple_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url)
from django import template from ..models import Message register = template.Library() @register.assignment_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url) Address removal of assignment_tag in Django 2.0from django import template from ..models import Message register = template.Library() @register.simple_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url)
<commit_before>from django import template from ..models import Message register = template.Library() @register.assignment_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url) <commit_msg>Address removal of assignment_tag in Django 2.0<commit_after>from django import template from ..models import Message register = template.Library() @register.simple_tag(takes_context=True) def get_messages_for_page(context, url): if url == context.template.engine.string_if_invalid: return [] return Message.objects.match(url)
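The swap works because simple_tag gained assignment support in Django 1.9: a simple_tag can be called with the "as" form, which is all assignment_tag provided before its removal in 2.0. A sketch of the assumed call site in a template (the variable name and the use of request.path are illustrative, not taken from the project):

TEMPLATE_USAGE = '''
{% load soapbox %}
{% get_messages_for_page request.path as page_messages %}
{% for message in page_messages %}{{ message }}{% endfor %}
'''
print(TEMPLATE_USAGE)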
3426f160d24f98a897149110bb6b67891e73dcca
tests/test_gen_addons_table.py
tests/test_gen_addons_table.py
import os import subprocess import unittest class TestGenAddonsTable(unittest.TestCase): def test_1(self): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') gen_addons_table = os.path.join(dirname, '..', 'tools', 'gen_addons_table.py') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') readme_before = open(readme_filename).read() readme_expected_filename = os.path.join(dirname, 'test_repo', 'README.md.expected') readme_expected = open(readme_expected_filename).read() try: res = subprocess.call([gen_addons_table], cwd=cwd) self.assertEquals(res, 0, 'gen_addons_table failed') readme_after = open(readme_filename).read() self.assertEquals(readme_after, readme_expected, 'gen_addons_table did not generate ' 'expected result') finally: open(readme_filename, 'w').write(readme_before)
import os import subprocess import sys def test_1(): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') with open(readme_filename) as f: readme_before = f.read() readme_expected_filename = os.path.join( dirname, 'test_repo', 'README.md.expected', ) with open(readme_expected_filename) as f: readme_expected = f.read() try: res = subprocess.call([ sys.executable, '-m', 'tools.gen_addons_table', ], cwd=cwd) assert res == 0 with open(readme_filename) as f: readme_after = f.read() assert readme_after == readme_expected finally: with open(readme_filename, 'w') as f: f.write(readme_before)
Make gen_addons_table test generate coverage
Make gen_addons_table test generate coverage This is done by invoking subprocess with sys.executable, pytest-cov does the rest. Also change test style to pytest instead of unittest.
Python
agpl-3.0
Yajo/maintainer-tools,Yajo/maintainer-tools,OCA/maintainer-tools,OCA/maintainer-tools,OCA/maintainer-tools,OCA/maintainer-tools,acsone/maintainer-tools,acsone/maintainer-tools,Yajo/maintainer-tools,acsone/maintainer-tools,acsone/maintainers-tools,Yajo/maintainer-tools,acsone/maintainer-tools,acsone/maintainers-tools,acsone/maintainers-tools
import os import subprocess import unittest class TestGenAddonsTable(unittest.TestCase): def test_1(self): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') gen_addons_table = os.path.join(dirname, '..', 'tools', 'gen_addons_table.py') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') readme_before = open(readme_filename).read() readme_expected_filename = os.path.join(dirname, 'test_repo', 'README.md.expected') readme_expected = open(readme_expected_filename).read() try: res = subprocess.call([gen_addons_table], cwd=cwd) self.assertEquals(res, 0, 'gen_addons_table failed') readme_after = open(readme_filename).read() self.assertEquals(readme_after, readme_expected, 'gen_addons_table did not generate ' 'expected result') finally: open(readme_filename, 'w').write(readme_before) Make gen_addons_table test generate coverage This is done by invoking subprocess with sys.executable, pytest-cov does the rest. Also change test style to pytest instead of unittest.
import os import subprocess import sys def test_1(): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') with open(readme_filename) as f: readme_before = f.read() readme_expected_filename = os.path.join( dirname, 'test_repo', 'README.md.expected', ) with open(readme_expected_filename) as f: readme_expected = f.read() try: res = subprocess.call([ sys.executable, '-m', 'tools.gen_addons_table', ], cwd=cwd) assert res == 0 with open(readme_filename) as f: readme_after = f.read() assert readme_after == readme_expected finally: with open(readme_filename, 'w') as f: f.write(readme_before)
<commit_before>import os import subprocess import unittest class TestGenAddonsTable(unittest.TestCase): def test_1(self): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') gen_addons_table = os.path.join(dirname, '..', 'tools', 'gen_addons_table.py') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') readme_before = open(readme_filename).read() readme_expected_filename = os.path.join(dirname, 'test_repo', 'README.md.expected') readme_expected = open(readme_expected_filename).read() try: res = subprocess.call([gen_addons_table], cwd=cwd) self.assertEquals(res, 0, 'gen_addons_table failed') readme_after = open(readme_filename).read() self.assertEquals(readme_after, readme_expected, 'gen_addons_table did not generate ' 'expected result') finally: open(readme_filename, 'w').write(readme_before) <commit_msg>Make gen_addons_table test generate coverage This is done by invoking subprocess with sys.executable, pytest-cov does the rest. Also change test style to pytest instead of unittest.<commit_after>
import os import subprocess import sys def test_1(): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') with open(readme_filename) as f: readme_before = f.read() readme_expected_filename = os.path.join( dirname, 'test_repo', 'README.md.expected', ) with open(readme_expected_filename) as f: readme_expected = f.read() try: res = subprocess.call([ sys.executable, '-m', 'tools.gen_addons_table', ], cwd=cwd) assert res == 0 with open(readme_filename) as f: readme_after = f.read() assert readme_after == readme_expected finally: with open(readme_filename, 'w') as f: f.write(readme_before)
import os import subprocess import unittest class TestGenAddonsTable(unittest.TestCase): def test_1(self): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') gen_addons_table = os.path.join(dirname, '..', 'tools', 'gen_addons_table.py') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') readme_before = open(readme_filename).read() readme_expected_filename = os.path.join(dirname, 'test_repo', 'README.md.expected') readme_expected = open(readme_expected_filename).read() try: res = subprocess.call([gen_addons_table], cwd=cwd) self.assertEquals(res, 0, 'gen_addons_table failed') readme_after = open(readme_filename).read() self.assertEquals(readme_after, readme_expected, 'gen_addons_table did not generate ' 'expected result') finally: open(readme_filename, 'w').write(readme_before) Make gen_addons_table test generate coverage This is done by invoking subprocess with sys.executable, pytest-cov does the rest. Also change test style to pytest instead of unittest.import os import subprocess import sys def test_1(): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') with open(readme_filename) as f: readme_before = f.read() readme_expected_filename = os.path.join( dirname, 'test_repo', 'README.md.expected', ) with open(readme_expected_filename) as f: readme_expected = f.read() try: res = subprocess.call([ sys.executable, '-m', 'tools.gen_addons_table', ], cwd=cwd) assert res == 0 with open(readme_filename) as f: readme_after = f.read() assert readme_after == readme_expected finally: with open(readme_filename, 'w') as f: f.write(readme_before)
<commit_before>import os import subprocess import unittest class TestGenAddonsTable(unittest.TestCase): def test_1(self): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') gen_addons_table = os.path.join(dirname, '..', 'tools', 'gen_addons_table.py') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') readme_before = open(readme_filename).read() readme_expected_filename = os.path.join(dirname, 'test_repo', 'README.md.expected') readme_expected = open(readme_expected_filename).read() try: res = subprocess.call([gen_addons_table], cwd=cwd) self.assertEquals(res, 0, 'gen_addons_table failed') readme_after = open(readme_filename).read() self.assertEquals(readme_after, readme_expected, 'gen_addons_table did not generate ' 'expected result') finally: open(readme_filename, 'w').write(readme_before) <commit_msg>Make gen_addons_table test generate coverage This is done by invoking subprocess with sys.executable, pytest-cov does the rest. Also change test style to pytest instead of unittest.<commit_after>import os import subprocess import sys def test_1(): dirname = os.path.dirname(__file__) cwd = os.path.join(dirname, 'test_repo') readme_filename = os.path.join(dirname, 'test_repo', 'README.md') with open(readme_filename) as f: readme_before = f.read() readme_expected_filename = os.path.join( dirname, 'test_repo', 'README.md.expected', ) with open(readme_expected_filename) as f: readme_expected = f.read() try: res = subprocess.call([ sys.executable, '-m', 'tools.gen_addons_table', ], cwd=cwd) assert res == 0 with open(readme_filename) as f: readme_after = f.read() assert readme_after == readme_expected finally: with open(readme_filename, 'w') as f: f.write(readme_before)
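Invoking the tool with sys.executable guarantees the child runs under the same interpreter and virtualenv as the test process, so pytest-cov's subprocess support, activated through inherited environment variables (COV_CORE_SOURCE and friends) plus a .pth startup hook, records coverage for the child too. A minimal runnable sketch of the invocation shape, with the stdlib module platform standing in for tools.gen_addons_table:

import subprocess
import sys

# Same interpreter, inherited environment: that is all the coverage
# machinery needs to follow the child process.
returncode = subprocess.call([sys.executable, '-m', 'platform'])
assert returncode == 0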
8157ee43f31bd106d00c86ac4a7bc35a79c29e41
django_payzen/app_settings.py
django_payzen/app_settings.py
""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID', None) CLIENT_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE', None) VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None)
""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID') VADS_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE') VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None)
Set essential payzen settings as mandatory for django_payzen.
Set essential payzen settings as mandatory for django_payzen.
Python
mit
zehome/django-payzen,bsvetchine/django-payzen,zehome/django-payzen,bsvetchine/django-payzen
""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID', None) CLIENT_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE', None) VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None) Set essential payzen settings as mandatory for django_payzen.
""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID') VADS_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE') VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None)
<commit_before>""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID', None) CLIENT_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE', None) VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None) <commit_msg>Set essential payzen settings as mandatory for django_payzen.<commit_after>
""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID') VADS_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE') VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None)
""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID', None) CLIENT_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE', None) VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None) Set essential payzen settings as mandatory for django_payzen.""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID') VADS_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE') VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None)
<commit_before>""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID', None) CLIENT_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE', None) VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None) <commit_msg>Set essential payzen settings as mandatory for django_payzen.<commit_after>""" Default payzen settings. All settings should be set in the django settings file and not directly in this file. We deliberately do not set up default values here in order to force user to setup explicitely the default behaviour.""" from django.conf import settings PAYZEN_REQUEST_URL = 'https://secure.payzen.eu/vads-payment/' VADS_CONTRIB = 'django-payzen v0.9' VADS_SITE_ID = getattr(settings, 'VADS_SITE_ID') VADS_CERTIFICATE = getattr(settings, 'VADS_CERTIFICATE') VADS_CURRENCY = getattr(settings, 'VADS_CURRENCY', None) VADS_ACTION_MODE = getattr(settings, 'VADS_ACTION_MODE', None) VADS_CTX_MODE = getattr(settings, 'VADS_CTX_MODE', None)
b03b71505469f8234dc18fdc653311cd63be252c
dallinger/prolific.py
dallinger/prolific.py
import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. return False
import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. # Set up bonus # response = requests.post(blah) # Process bonus previously set up # bonus_id = study_id # ? maybe? # payment_endpoint = ( # f"https://api.prolific.co/api/v1/bulk-bonus-payments/{bonus_id}/pay/" # ) # response = requests.post(payment_endpoint) return False
Add sketch of bonus payment process
Add sketch of bonus payment process
Python
mit
Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger
import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. return False Add sketch of bonus payment process
import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. # Set up bonus # response = requests.post(blah) # Process bonus previously set up # bonus_id = study_id # ? maybe? # payment_endpoint = ( # f"https://api.prolific.co/api/v1/bulk-bonus-payments/{bonus_id}/pay/" # ) # response = requests.post(payment_endpoint) return False
<commit_before>import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. return False <commit_msg>Add sketch of bonus payment process<commit_after>
import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. # Set up bonus # response = requests.post(blah) # Process bonus previously set up # bonus_id = study_id # ? maybe? # payment_endpoint = ( # f"https://api.prolific.co/api/v1/bulk-bonus-payments/{bonus_id}/pay/" # ) # response = requests.post(payment_endpoint) return False
import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. return False Add sketch of bonus payment processimport logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. # Set up bonus # response = requests.post(blah) # Process bonus previously set up # bonus_id = study_id # ? maybe? # payment_endpoint = ( # f"https://api.prolific.co/api/v1/bulk-bonus-payments/{bonus_id}/pay/" # ) # response = requests.post(payment_endpoint) return False
<commit_before>import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. return False <commit_msg>Add sketch of bonus payment process<commit_after>import logging logger = logging.getLogger(__file__) class ProlificServiceException(Exception): """Some error from Prolific""" pass class ProlificService: """Wrapper for Prolific REST API""" def __init__(self, prolific_api_token: str): self.api_token = prolific_api_token def create_study(options: dict) -> dict: """Create a Study on Prolific, and return info about it.""" # TODO The work. return {} def grant_bonus(study_id: str, worker_id: str, amount: float) -> bool: """Pay a worker a bonus""" amount_str = "{:.2f}".format(amount) payload = { "study_id": study_id, "csv_bonuses": f"{worker_id},{amount_str}\n", # ? trailing newline? } logger.info(f"Would be sending bonus request: {payload}") # TODO Actually make request, etc. # Set up bonus # response = requests.post(blah) # Process bonus previously set up # bonus_id = study_id # ? maybe? # payment_endpoint = ( # f"https://api.prolific.co/api/v1/bulk-bonus-payments/{bonus_id}/pay/" # ) # response = requests.post(payment_endpoint) return False
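The commented-out lines sketch a two-step flow: register a bulk bonus, then trigger its payment. The sketch below fleshes that shape out with requests, but everything beyond what the comments show is an assumption rather than a confirmed Prolific API: the setup endpoint, the Token authorization scheme, and the id field in the response are all guesses (the original comment itself wonders whether bonus_id equals study_id):

import requests

API_BASE = 'https://api.prolific.co/api/v1'  # base URL taken from the commented sketch

def pay_bonus(api_token, study_id, worker_id, amount):
    headers = {'Authorization': 'Token %s' % api_token}  # assumed auth scheme
    setup = requests.post(
        API_BASE + '/submissions/bonus-payments/',  # assumed setup endpoint
        headers=headers,
        json={'study_id': study_id,
              'csv_bonuses': '{},{:.2f}'.format(worker_id, amount)},
    )
    setup.raise_for_status()
    bonus_id = setup.json()['id']  # assumed response field
    pay = requests.post(
        API_BASE + '/bulk-bonus-payments/%s/pay/' % bonus_id,
        headers=headers,
    )
    return pay.ok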
80d1126418af0890a36a4bac9ddf53a2ab40e851
cpp_coveralls/report.py
cpp_coveralls/report.py
from __future__ import absolute_import from __future__ import print_function import requests import json URL = 'https://coveralls.io/api/v1/jobs' def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0
from __future__ import absolute_import from __future__ import print_function import requests import json import os URL = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + "/api/v1/jobs" def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0
Support COVERALLS_ENDPOINT for Enterprise usage
Support COVERALLS_ENDPOINT for Enterprise usage
Python
apache-2.0
eddyxu/cpp-coveralls,eddyxu/cpp-coveralls
from __future__ import absolute_import from __future__ import print_function import requests import json URL = 'https://coveralls.io/api/v1/jobs' def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0 Support COVERALLS_ENDPOINT for Enterprise usage
from __future__ import absolute_import from __future__ import print_function import requests import json import os URL = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + "/api/v1/jobs" def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0
<commit_before>from __future__ import absolute_import from __future__ import print_function import requests import json URL = 'https://coveralls.io/api/v1/jobs' def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0 <commit_msg>Support COVERALLS_ENDPOINT for Enterprise usage<commit_after>
from __future__ import absolute_import from __future__ import print_function import requests import json import os URL = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + "/api/v1/jobs" def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0
from __future__ import absolute_import from __future__ import print_function import requests import json URL = 'https://coveralls.io/api/v1/jobs' def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0 Support COVERALLS_ENDPOINT for Enterprise usagefrom __future__ import absolute_import from __future__ import print_function import requests import json import os URL = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + "/api/v1/jobs" def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0
<commit_before>from __future__ import absolute_import from __future__ import print_function import requests import json URL = 'https://coveralls.io/api/v1/jobs' def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0 <commit_msg>Support COVERALLS_ENDPOINT for Enterprise usage<commit_after>from __future__ import absolute_import from __future__ import print_function import requests import json import os URL = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + "/api/v1/jobs" def post_report(coverage): """Post coverage report to coveralls.io.""" response = requests.post(URL, files={'json_file': json.dumps(coverage)}) try: result = response.json() except ValueError: result = {'error': 'Failure to submit data. ' 'Response [%(status)s]: %(text)s' % { 'status': response.status_code, 'text': response.text}} print(result) if 'error' in result: return result['error'] return 0
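One caveat worth knowing: URL is computed once at import time, so COVERALLS_ENDPOINT must be exported before cpp_coveralls.report is imported. The getenv-with-default pattern itself is easy to verify standalone (the Enterprise host below is hypothetical):

import os

os.environ.pop('COVERALLS_ENDPOINT', None)  # fallback case: public coveralls.io
url = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + '/api/v1/jobs'
assert url == 'https://coveralls.io/api/v1/jobs'

os.environ['COVERALLS_ENDPOINT'] = 'https://coveralls.example.internal'  # override case
url = os.getenv('COVERALLS_ENDPOINT', 'https://coveralls.io') + '/api/v1/jobs'
assert url == 'https://coveralls.example.internal/api/v1/jobs'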
03dc4f221aa9909c8a3074cbef9fd1816e0cc86c
stagecraft/libs/mass_update/data_set_mass_update.py
stagecraft/libs/mass_update/data_set_mass_update.py
from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): model_filter = DataSet.objects if 'data_type' in query: data_type = cls._get_model_instance_by_name( DataType, query['data_type']) model_filter = model_filter.filter(data_type=data_type) if 'data_group' in query: data_group = cls._get_model_instance_by_name( DataGroup, query['data_group']) model_filter = model_filter.filter(data_group=data_group) model_filter.update(bearer_token=new_token) @classmethod def _get_model_instance_by_name(cls, model, name): return model.objects.get(name=name)
from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(object): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): cls(query).update(bearer_token=new_token) def __init__(self, query_dict): self.model_filter = DataSet.objects if 'data_type' in query_dict: data_type = self._get_model_instance_by_name( DataType, query_dict['data_type']) self.model_filter = self.model_filter.filter(data_type=data_type) if 'data_group' in query_dict: data_group = self._get_model_instance_by_name( DataGroup, query_dict['data_group']) self.model_filter = self.model_filter.filter(data_group=data_group) def update(self, **kwargs): self.model_filter.update(**kwargs) def _get_model_instance_by_name(self, model, name): return model.objects.get(name=name)
Refactor query out into instance which delegates the update
Refactor query out into instance which delegates the update
Python
mit
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): model_filter = DataSet.objects if 'data_type' in query: data_type = cls._get_model_instance_by_name( DataType, query['data_type']) model_filter = model_filter.filter(data_type=data_type) if 'data_group' in query: data_group = cls._get_model_instance_by_name( DataGroup, query['data_group']) model_filter = model_filter.filter(data_group=data_group) model_filter.update(bearer_token=new_token) @classmethod def _get_model_instance_by_name(cls, model, name): return model.objects.get(name=name) Refactor query out into instance with delegates the update
from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(object): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): cls(query).update(bearer_token=new_token) def __init__(self, query_dict): self.model_filter = DataSet.objects if 'data_type' in query_dict: data_type = self._get_model_instance_by_name( DataType, query_dict['data_type']) self.model_filter = self.model_filter.filter(data_type=data_type) if 'data_group' in query_dict: data_group = self._get_model_instance_by_name( DataGroup, query_dict['data_group']) self.model_filter = self.model_filter.filter(data_group=data_group) def update(self, **kwargs): self.model_filter.update(**kwargs) def _get_model_instance_by_name(self, model, name): return model.objects.get(name=name)
<commit_before>from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): model_filter = DataSet.objects if 'data_type' in query: data_type = cls._get_model_instance_by_name( DataType, query['data_type']) model_filter = model_filter.filter(data_type=data_type) if 'data_group' in query: data_group = cls._get_model_instance_by_name( DataGroup, query['data_group']) model_filter = model_filter.filter(data_group=data_group) model_filter.update(bearer_token=new_token) @classmethod def _get_model_instance_by_name(cls, model, name): return model.objects.get(name=name) <commit_msg>Refactor query out into instance with delegates the update<commit_after>
from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(object): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): cls(query).update(bearer_token=new_token) def __init__(self, query_dict): self.model_filter = DataSet.objects if 'data_type' in query_dict: data_type = self._get_model_instance_by_name( DataType, query_dict['data_type']) self.model_filter = self.model_filter.filter(data_type=data_type) if 'data_group' in query_dict: data_group = self._get_model_instance_by_name( DataGroup, query_dict['data_group']) self.model_filter = self.model_filter.filter(data_group=data_group) def update(self, **kwargs): self.model_filter.update(**kwargs) def _get_model_instance_by_name(self, model, name): return model.objects.get(name=name)
from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): model_filter = DataSet.objects if 'data_type' in query: data_type = cls._get_model_instance_by_name( DataType, query['data_type']) model_filter = model_filter.filter(data_type=data_type) if 'data_group' in query: data_group = cls._get_model_instance_by_name( DataGroup, query['data_group']) model_filter = model_filter.filter(data_group=data_group) model_filter.update(bearer_token=new_token) @classmethod def _get_model_instance_by_name(cls, model, name): return model.objects.get(name=name) Refactor query out into instance with delegates the updatefrom stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(object): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): cls(query).update(bearer_token=new_token) def __init__(self, query_dict): self.model_filter = DataSet.objects if 'data_type' in query_dict: data_type = self._get_model_instance_by_name( DataType, query_dict['data_type']) self.model_filter = self.model_filter.filter(data_type=data_type) if 'data_group' in query_dict: data_group = self._get_model_instance_by_name( DataGroup, query_dict['data_group']) self.model_filter = self.model_filter.filter(data_group=data_group) def update(self, **kwargs): self.model_filter.update(**kwargs) def _get_model_instance_by_name(self, model, name): return model.objects.get(name=name)
<commit_before>from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): model_filter = DataSet.objects if 'data_type' in query: data_type = cls._get_model_instance_by_name( DataType, query['data_type']) model_filter = model_filter.filter(data_type=data_type) if 'data_group' in query: data_group = cls._get_model_instance_by_name( DataGroup, query['data_group']) model_filter = model_filter.filter(data_group=data_group) model_filter.update(bearer_token=new_token) @classmethod def _get_model_instance_by_name(cls, model, name): return model.objects.get(name=name) <commit_msg>Refactor query out into instance with delegates the update<commit_after>from stagecraft.apps.datasets.models import DataGroup, DataSet, DataType class DataSetMassUpdate(object): @classmethod def update_bearer_token_for_data_type_or_group_name(cls, query, new_token): cls(query).update(bearer_token=new_token) def __init__(self, query_dict): self.model_filter = DataSet.objects if 'data_type' in query_dict: data_type = self._get_model_instance_by_name( DataType, query_dict['data_type']) self.model_filter = self.model_filter.filter(data_type=data_type) if 'data_group' in query_dict: data_group = self._get_model_instance_by_name( DataGroup, query_dict['data_group']) self.model_filter = self.model_filter.filter(data_group=data_group) def update(self, **kwargs): self.model_filter.update(**kwargs) def _get_model_instance_by_name(self, model, name): return model.objects.get(name=name)
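The refactor keeps the chained QuerySet shape: each recognised key in the query dict narrows model_filter with another .filter() call, and update() then applies to whatever remains (Django's QuerySet.update returns the number of rows matched). A self-contained sketch of that shape without Django, using made-up dataset rows:

class FakeQuerySet(object):
    # Mimics the two QuerySet methods DataSetMassUpdate relies on.
    def __init__(self, rows):
        self.rows = rows

    def filter(self, **kwargs):
        return FakeQuerySet([r for r in self.rows
                             if all(r.get(k) == v for k, v in kwargs.items())])

    def update(self, **kwargs):
        for row in self.rows:
            row.update(kwargs)
        return len(self.rows)

datasets = [{'data_type': 'realtime', 'bearer_token': 'old'},
            {'data_type': 'monthly', 'bearer_token': 'old'}]
matched = FakeQuerySet(datasets).filter(data_type='realtime')
assert matched.update(bearer_token='new') == 1
assert datasets[0]['bearer_token'] == 'new'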
f78e20b05a5d7ede84f80a9be16a6a40a1a7abf8
ifs/cli.py
ifs/cli.py
import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): click.echo(lib.install(application)) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli()
import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): cmd = lib.install(application) click.echo(cmd.output) exit(cmd.returncode) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli()
Exit with return code from called script
Exit with return code from called script
Python
isc
cbednarski/ifs-python,cbednarski/ifs-python
import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): click.echo(lib.install(application)) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli() Exit with return code from called script
import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): cmd = lib.install(application) click.echo(cmd.output) exit(cmd.returncode) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli()
<commit_before>import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): click.echo(lib.install(application)) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli() <commit_msg>Exit with return code from called script<commit_after>
import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): cmd = lib.install(application) click.echo(cmd.output) exit(cmd.returncode) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli()
import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): click.echo(lib.install(application)) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli() Exit with return code from called scriptimport click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): cmd = lib.install(application) click.echo(cmd.output) exit(cmd.returncode) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli()
<commit_before>import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): click.echo(lib.install(application)) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli() <commit_msg>Exit with return code from called script<commit_after>import click import lib @click.group() def cli(): """ Install From Source When the version in the package manager has gone stale, get a fresh, production-ready version from a source tarball or precompiled archive. Send issues or improvements to https://github.com/cbednarski/ifs """ pass @cli.command() def ls(): for app in lib.list_apps(): click.echo(app) @cli.command() @click.argument('term') def search(term): for app in lib.list_apps(): if term in app: click.echo(app) @cli.command() @click.argument('application') def install(application): cmd = lib.install(application) click.echo(cmd.output) exit(cmd.returncode) @cli.command() @click.argument('application') def info(application): """ Show information about an application from ifs ls """ info = lib.app_info(application) for k, v in info.iteritems(): if type(v) is list: v = ' '.join(v) click.echo('%s: %s' % (k, v)) if __name__ == '__main__': cli()
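The new install command assumes lib.install() returns an object exposing output and returncode instead of a bare string, which is what lets the CLI propagate the called script's exit status. A minimal sketch of that assumed result shape (fake_install and its values are placeholders):

from collections import namedtuple

CommandResult = namedtuple('CommandResult', ['output', 'returncode'])

def fake_install(application):
    # Stand-in for lib.install: runs nothing, just returns the shape.
    return CommandResult(output='installed %s' % application, returncode=0)

cmd = fake_install('nginx')
print(cmd.output)
raise SystemExit(cmd.returncode)  # equivalent of the CLI's exit(cmd.returncode)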
c8ef9e7271796239de2878bbf40a2c2d388427e4
bayesian_methods_for_hackers/simulate_messages_ch02.py
bayesian_methods_for_hackers/simulate_messages_ch02.py
import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json' matplotlib.rcParams.update(json.load(open(matplotlibrc_path))) tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main()
import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main()
Change of repo name. Update affected paths
Change of repo name. Update affected paths
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json' matplotlib.rcParams.update(json.load(open(matplotlibrc_path))) tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main() Change of repo name. Update effected paths
import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main()
<commit_before>import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json' matplotlib.rcParams.update(json.load(open(matplotlibrc_path))) tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main() <commit_msg>Change of repo name. Update effected paths<commit_after>
import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main()
import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json' matplotlib.rcParams.update(json.load(open(matplotlibrc_path))) tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main() Change of repo name. Update effected pathsimport json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main()
<commit_before>import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): matplotlibrc_path = '/home/noel/repo/playground/matplotlibrc.json' matplotlib.rcParams.update(json.load(open(matplotlibrc_path))) tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main() <commit_msg>Change of repo name. Update effected paths<commit_after>import json import matplotlib import numpy as np import pymc as pm from matplotlib import pyplot as plt def main(): tau = pm.rdiscrete_uniform(0, 80) print tau alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) print lambda_1, lambda_2 data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] def plot_artificial_sms_dataset(): tau = pm.rdiscrete_uniform(0, 80) alpha = 1. / 20. lambda_1, lambda_2 = pm.rexponential(alpha, 2) data = np.r_[pm.rpoisson(lambda_1, tau), pm.rpoisson(lambda_2, 80 - tau)] plt.bar(np.arange(80), data, color="#348ABD") plt.bar(tau - 1, data[tau - 1], color="r", label="user behaviour changed") plt.xlim(0, 80) plt.title("More example of artificial datasets") for i in range(1, 5): plt.subplot(4, 1, i) plot_artificial_sms_dataset() plt.show() if __name__ == '__main__': main()
4f9ddcd07dbf5a84f183e7a84a8819bde062dbcf
example/_find_fuse_parts.py
example/_find_fuse_parts.py
import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """)
import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) print(sys.path) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """)
Troubleshoot potential sys.path problem with python 3.x
Troubleshoot potential sys.path problem with python 3.x
Python
lgpl-2.1
libfuse/python-fuse,libfuse/python-fuse
import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """) Troubleshoot potential sys.path problem with python 3.x
import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) print(sys.path) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """)
<commit_before>import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """) <commit_msg>Troubleshoot potential sys.path problem with python 3.x<commit_after>
import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) print(sys.path) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """)
import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """) Troubleshoot potential sys.path problem with python 3.ximport sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) print(sys.path) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """)
<commit_before>import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """) <commit_msg>Troubleshoot potential sys.path problem with python 3.x<commit_after>import sys, os, glob from os.path import realpath, dirname, join from traceback import format_exception PYTHON_MAJOR_MINOR = "%s.%s" % (sys.version_info[0], sys.version_info[1]) ddd = realpath(join(dirname(sys.argv[0]), '..')) for d in [ddd, '.']: for p in glob.glob(join(d, 'build', 'lib.*%s' % PYTHON_MAJOR_MINOR)): sys.path.insert(0, p) print(sys.path) try: import fuse except ImportError: raise RuntimeError(""" ! Got exception: """ + "".join([ "> " + x for x in format_exception(*sys.exc_info()) ]) + """ ! Have you ran `python setup.py build'? ! ! We've done our best to find the necessary components of the FUSE bindings ! even if it's not installed, we've got no clue what went wrong for you... """)
360ded396e5febbb4871797dd6d676884e24299a
api/setup.py
api/setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ [(os.path.join('share/humbug/', relpath), glob.glob(os.path.join(relpath, '*'))) for relpath in glob.glob("integrations/*")] + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], )
#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup def recur_expand(target_root, dir): for root, _, files in os.walk(dir): paths = [os.path.join(root, f) for f in files] if len(paths): yield os.path.join(target_root, root), paths setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ list(recur_expand('share/humbug', 'integrations/')) + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], )
Include folders with subfolders when creating api tarball
Include folders with subfolders when creating api tarball (imported from commit b9d564a6cc4ee6e2afa0108b6d9f18af039fc8cf)
Python
apache-2.0
jeffcao/zulip,vabs22/zulip,verma-varsha/zulip,jphilipsen05/zulip,KJin99/zulip,DazWorrall/zulip,johnny9/zulip,so0k/zulip,suxinde2009/zulip,proliming/zulip,mdavid/zulip,susansls/zulip,zofuthan/zulip,pradiptad/zulip,vikas-parashar/zulip,joshisa/zulip,amallia/zulip,voidException/zulip,esander91/zulip,bssrdf/zulip,calvinleenyc/zulip,Gabriel0402/zulip,littledogboy/zulip,babbage/zulip,DazWorrall/zulip,noroot/zulip,JPJPJPOPOP/zulip,gkotian/zulip,wavelets/zulip,Suninus/zulip,amyliu345/zulip,peguin40/zulip,levixie/zulip,m1ssou/zulip,kokoar/zulip,jonesgithub/zulip,udxxabp/zulip,jonesgithub/zulip,kokoar/zulip,christi3k/zulip,LeeRisk/zulip,wweiradio/zulip,hayderimran7/zulip,pradiptad/zulip,AZtheAsian/zulip,xuxiao/zulip,babbage/zulip,sharmaeklavya2/zulip,zofuthan/zulip,sharmaeklavya2/zulip,hafeez3000/zulip,huangkebo/zulip,hackerkid/zulip,atomic-labs/zulip,xuanhan863/zulip,Galexrt/zulip,dattatreya303/zulip,KingxBanana/zulip,dattatreya303/zulip,samatdav/zulip,thomasboyt/zulip,wweiradio/zulip,Gabriel0402/zulip,luyifan/zulip,TigorC/zulip,vabs22/zulip,alliejones/zulip,Qgap/zulip,johnnygaddarr/zulip,karamcnair/zulip,mansilladev/zulip,hengqujushi/zulip,huangkebo/zulip,amanharitsh123/zulip,jerryge/zulip,glovebx/zulip,sup95/zulip,ahmadassaf/zulip,ashwinirudrappa/zulip,seapasulli/zulip,johnny9/zulip,tbutter/zulip,joshisa/zulip,mahim97/zulip,hackerkid/zulip,zorojean/zulip,Batterfii/zulip,JanzTam/zulip,amyliu345/zulip,jrowan/zulip,PaulPetring/zulip,bssrdf/zulip,joshisa/zulip,mohsenSy/zulip,vakila/zulip,fw1121/zulip,moria/zulip,aps-sids/zulip,samatdav/zulip,jimmy54/zulip,zwily/zulip,dotcool/zulip,themass/zulip,samatdav/zulip,kokoar/zulip,guiquanz/zulip,schatt/zulip,kaiyuanheshang/zulip,tbutter/zulip,avastu/zulip,umkay/zulip,natanovia/zulip,saitodisse/zulip,shaunstanislaus/zulip,SmartPeople/zulip,wavelets/zulip,SmartPeople/zulip,bitemyapp/zulip,firstblade/zulip,bowlofstew/zulip,Jianchun1/zulip,verma-varsha/zulip,ipernet/zulip,Diptanshu8/zulip,swinghu/zulip,arpith/zulip,arpitpanwar/zulip,jimmy54/zulip,jphilipsen05/zulip,LAndreas/zulip,johnnygaddarr/zulip,luyifan/zulip,TigorC/zulip,codeKonami/zulip,bitemyapp/zulip,johnnygaddarr/zulip,umkay/zulip,dawran6/zulip,dnmfarrell/zulip,sonali0901/zulip,punchagan/zulip,shrikrishnaholla/zulip,peguin40/zulip,xuanhan863/zulip,jeffcao/zulip,schatt/zulip,zulip/zulip,suxinde2009/zulip,SmartPeople/zulip,armooo/zulip,qq1012803704/zulip,zhaoweigg/zulip,eeshangarg/zulip,nicholasbs/zulip,jessedhillon/zulip,hustlzp/zulip,zachallaun/zulip,proliming/zulip,avastu/zulip,xuxiao/zulip,peguin40/zulip,technicalpickles/zulip,synicalsyntax/zulip,jackrzhang/zulip,yocome/zulip,ipernet/zulip,MariaFaBella85/zulip,showell/zulip,esander91/zulip,praveenaki/zulip,gkotian/zulip,swinghu/zulip,LeeRisk/zulip,qq1012803704/zulip,zacps/zulip,ikasumiwt/zulip,KJin99/zulip,swinghu/zulip,dxq-git/zulip,jimmy54/zulip,brockwhittaker/zulip,armooo/zulip,willingc/zulip,Vallher/zulip,johnnygaddarr/zulip,reyha/zulip,jackrzhang/zulip,jainayush975/zulip,so0k/zulip,ApsOps/zulip,seapasulli/zulip,Batterfii/zulip,AZtheAsian/zulip,tiansiyuan/zulip,hayderimran7/zulip,mdavid/zulip,hafeez3000/zulip,vaidap/zulip,EasonYi/zulip,brockwhittaker/zulip,zofuthan/zulip,aliceriot/zulip,voidException/zulip,zorojean/zulip,aakash-cr7/zulip,niftynei/zulip,sharmaeklavya2/zulip,ryanbackman/zulip,KingxBanana/zulip,paxapy/zulip,vaidap/zulip,rishig/zulip,mohsenSy/zulip,isht3/zulip,bluesea/zulip,samatdav/zulip,synicalsyntax/zulip,ApsOps/zulip,praveenaki/zulip,technicalpickles/zulip,cosmicAsymmetry/zulip,wdaher/zulip,stamhe/zulip,peguin40/zulip,ikasumiwt/zulip,christi3k/zulip,fw1121/zulip,yuvipanda/zulip,akuseru/zulip,alliejones/zulip,willingc/zulip,ApsOps/zulip,firstblade/zulip,j831/zulip,lfranchi/zulip,natanovia/zulip,Drooids/zulip,bluesea/zulip,rht/zulip,yuvipanda/zulip,grave-w-grave/zulip,DazWorrall/zulip,yuvipanda/zulip,hj3938/zulip,hj3938/zulip,qq1012803704/zulip,rishig/zulip,rht/zulip,babbage/zulip,xuanhan863/zulip,joshisa/zulip,mahim97/zulip,ericzhou2008/zulip,guiquanz/zulip,dawran6/zulip,avastu/zulip,umkay/zulip,natanovia/zulip,saitodisse/zulip,shaunstanislaus/zulip,SmartPeople/zulip,wavelets/zulip,SmartPeople/zulip,bitemyapp/zulip,firstblade/zulip,bowlofstew/zulip,Jianchun1/zulip,verma-varsha/zulip,ipernet/zulip,Diptanshu8/zulip,swinghu/zulip,arpith/zulip,arpitpanwar/zulip,jimmy54/zulip,jphilipsen05/zulip,LAndreas/zulip,johnnygaddarr/zulip,luyifan/zulip,TigorC/zulip,codeKonami/zulip,bitemyapp/zulip,johnnygaddarr/zulip,umkay/zulip,dawran6/zulip,dnmfarrell/zulip,sonali0901/zulip,punchagan/zulip,shrikrishnaholla/zulip,peguin40/zulip,xuanhan863/zulip,jeffcao/zulip,schatt/zulip,zulip/zulip,suxinde2009/zulip,SmartPeople/zulip,armooo/zulip,qq1012803704/zulip,zhaoweigg/zulip,eeshangarg/zulip,nicholasbs/zulip,jessedhillon/zulip,hustlzp/zulip,zachallaun/zulip,proliming/zulip,avastu/zulip,xuxiao/zulip,peguin40/zulip,technicalpickles/zulip,synicalsyntax/zulip,jackrzhang/zulip,yocome/zulip,ipernet/zulip,MariaFaBella85/zulip,showell/zulip,esander91/zulip,praveenaki/zulip,gkotian/zulip,swinghu/zulip,LeeRisk/zulip,qq1012803704/zulip,zacps/zulip,ikasumiwt/zulip,KJin99/zulip,swinghu/zulip,dxq-git/zulip,jimmy54/zulip,brockwhittaker/zulip,armooo/zulip,willingc/zulip,Vallher/zulip,johnnygaddarr/zulip,reyha/zulip,jackrzhang/zulip,jainayush975/zulip,so0k/zulip,ApsOps/zulip,seapasulli/zulip,Batterfii/zulip,AZtheAsian/zulip,tiansiyuan/zulip,hayderimran7/zulip,mdavid/zulip,hafeez3000/zulip,vaidap/zulip,EasonYi/zulip,brockwhittaker/zulip,zofuthan/zulip,aliceriot/zulip,voidException/zulip,akuseru/zulip,saitodisse/zulip,bowlofstew/zulip,synicalsyntax/zulip,Batterfii/zulip,Suninus/zulip,MayB/zulip,wweiradio/zulip,glovebx/zulip,gkotian/zulip,aliceriot/zulip,jrowan/zulip,ahmadassaf/zulip,dnmfarrell/zulip,jimmy54/zulip,wangdeshui/zulip,tommyip/zulip,vakila/zulip,mansilladev/zulip,andersk/zulip,calvinleenyc/zulip,zacps/zulip,Galexrt/zulip,peiwei/zulip,noroot/zulip,huangkebo/zulip,TigorC/zulip,amallia/zulip,LeeRisk/zulip,technicalpickles/zulip,Batterfii/zulip,alliejones/zulip,j831/zulip,reyha/zulip,bitemyapp/zulip,zachallaun/zulip,ericzhou2008/zulip,ashwinirudrappa/zulip,Gabriel0402/zulip,joyhchen/zulip,zorojean/zulip,DazWorrall/zulip,LeeRisk/zulip,sonali0901/zulip,EasonYi/zulip,susansls/zulip,rht/zulip,tommyip/zulip,Frouk/zulip,kou/zulip,punchagan/zulip,udxxabp/zulip,shrikrishnaholla/zulip,Cheppers/zulip,vikas-parashar/zulip,he15his/zulip,itnihao/zulip,KJin99/zulip,mahim97/zulip,pradiptad/zulip,kaiyuanheshang/zulip,amyliu345/zulip,dnmfarrell/zulip,andersk/zulip,atomic-labs/zulip,DazWorrall/zulip,lfranchi/zulip,christi3k/zulip,ryanbackman/zulip,showell/zulip,willingc/zulip,swinghu/zulip,alliejones/zulip,souravbadami/zulip,dawran6/zulip,RobotCaleb/zulip,yuvipanda/zulip,krtkmj/zulip,alliejones/zulip,deer-hope/zulip,Drooids/zulip,ufosky-server/zulip,shubhamdhama/zulip,schatt/zulip,firstblade/zulip,brainwane/zulip,dwrpayne/zulip,yuvipanda/zulip,tiansiyuan/zulip,itnihao/zulip,levixie/zulip,aakash-cr7/zulip,amyliu345/zulip,esander91/zulip,LAndreas/zulip,hustlzp/zulip,proliming/zulip,jphilipsen05/zulip,LeeRisk/zulip,tbutter/zulip,developerfm/zulip,vikas-parashar/zulip,atomic-labs/zulip,vakila/zulip,Qgap/zulip,kokoar/zulip,hengqujushi/zulip,jonesgithub/zulip,niftynei/zulip,aakash-cr7/zulip,Suninus/zulip,calvinleenyc/zulip,synicalsyntax/zulip,so0k/zulip,tdr130/zulip,easyfmxu/zulip,themass/zulip,dotcool/zulip,jrowan/zulip,RobotCaleb/zulip,atomic-labs/zulip,arpitpanwar/zulip,guiquanz/zulip,verma-varsha/zulip,hengqujushi/zulip,akuseru/zulip,schatt/zulip,PaulPetring/zulip,tommyip/zulip,stamhe/zulip,dhcrzf/zulip,wweiradio/zulip,ufosky-server/zulip,Frouk/zulip,hayderimran7/zulip,arpitpanwar/zulip,shaunstanislaus/zulip,babbage/zulip,gkotian/zulip,Drooids/zulip,adnanh/zulip,rht/zulip,PhilSk/zulip,hengqujushi/zulip,he15his/zulip,gigawhitlocks/zulip,umkay/zulip,vabs22/zulip,christi3k/zulip,DazWorrall/zulip,tbutter/zulip,KingxBanana/zulip,hayderimran7/zulip,mohsenSy/zulip,Frouk/zulip,JanzTam/zulip,LAndreas/zulip,jackrzhang/zulip,shubhamdhama/zulip,ericzhou2008/zulip,bitemyapp/zulip,hackerkid/zulip,seapasulli/zulip,Cheppers/zulip,yocome/zulip,amanharitsh123/zulip,zwily/zulip,JanzTam/zulip,atomic-labs/zulip,zhaoweigg/zulip,adnanh/zulip,joyhchen/zulip,udxxabp/zulip,arpitpanwar/zulip,jerryge/zulip,codeKonami/zulip,dattatreya303/zulip,vikas-parashar/zulip,shrikrishnaholla/zulip,punchagan/zulip,tdr130/zulip,firstblade/zulip,zhaoweigg/zulip,Galexrt/zulip,krtkmj/zulip,seapasulli/zulip,KJin99/zulip,jessedhillon/zulip,PaulPetring/zulip,RobotCaleb/zulip,bluesea/zulip,Batterfii/zulip,grave-w-grave/zulip,MariaFaBella85/zulip,brainwane/zulip,tiansiyuan/zulip,punchagan/zulip,MayB/zulip,eastlhu/zulip,PhilSk/zulip,Suninus/zulip,hayderimran7/zulip,ahmadassaf/zulip,noroot/zulip,AZtheAsian/zulip,bssrdf/zulip,jackrzhang/zulip,itnihao/zulip,shaunstanislaus/zulip,tommyip/zulip,deer-hope/zulip,themass/zulip,zorojean/zulip,nicholasbs/zulip,Vallher/zulip,tommyip/zulip,he15his/zulip,showell/zulip,xuanhan863/zulip,j831/zulip,praveenaki/zulip,hustlzp/zulip,ericzhou2008/zulip,blaze225/zulip,so0k/zulip,he15his/zulip,rishig/zulip,krtkmj/zulip,arpith/zulip,MayB/zulip,proliming/zulip,itnihao/zulip,jeffcao/zulip,he15his/zulip,praveenaki/zulip,sharmaeklavya2/zulip,Vallher/zulip,KingxBanana/zulip,voidException/zulip,kou/zulip,xuxiao/zulip,hackerkid/zulip,hayderimran7/zulip,ericzhou2008/zulip,RobotCaleb/zulip,littledogboy/zulip,mdavid/zulip,PaulPetring/zulip,AZtheAsian/zulip,ryansnowboarder/zulip,levixie/zulip,zulip/zulip,ashwinirudrappa/zulip,technicalpickles/zulip,easyfmxu/zulip,amanharitsh123/zulip,wdaher/zulip,eeshangarg/zulip,praveenaki/zulip,LAndreas/zulip,umkay/zulip,TigorC/zulip,EasonYi/zulip,pradiptad/zulip,hackerkid/zulip,tiansiyuan/zulip,karamcnair/zulip,PhilSk/zulip,calvinleenyc/zulip,zhaoweigg/zulip,m1ssou/zulip,ryansnowboarder/zulip,saitodisse/zulip,bowlofstew/zulip,Juanvulcano/zulip,eastlhu/zulip,levixie/zulip,bssrdf/zulip,Galexrt/zulip,Galexrt/zulip,wweiradio/zulip,shrikrishnaholla/zulip,arpitpanwar/zulip,dawran6/zulip,schatt/zulip,developerfm/zulip,bowlofstew/zulip,developerfm/zulip,hustlzp/zulip,wavelets/zulip,eeshangarg/zulip,peguin40/zulip,EasonYi/zulip,niftynei/zulip,peiwei/zulip,mansilladev/zulip,zorojean/zulip,praveenaki/zulip,guiquanz/zulip,armooo/zulip,willingc/zulip,JPJPJPOPOP/zulip,jessedhillon/zulip,johnny9/zulip,christi3k/zulip,synicalsyntax/zulip,timabbott/zulip,dotcool/zulip,ashwinirudrappa/zulip,ryanbackman/zulip,hengqujushi/zulip,isht3/zulip,aps-sids/zulip,zacps/zulip,zofuthan/zulip,dawran6/zulip,bastianh/zulip,Frouk/zulip,zwily/zulip,aliceriot/zulip,tiansiyuan/zulip,shubhamdhama/zulip,zwily/zulip,andersk/zulip,wavelets/zulip,tommyip/zulip,easyfmxu/zulip,zachallaun/zulip,reyha/zulip,wweiradio/zulip,natanovia/zulip,zorojean/zulip,Diptanshu8/zulip,KJin99/zulip,KingxBanana/zulip,rht/zulip,seapasulli/zulip,m1ssou/zulip,joyhchen/zulip,zulip/zulip,jainayush975/zulip,sonali0901/zulip,ipernet/zulip,ApsOps/zulip,bastianh/zulip,willingc/zulip,avastu/zulip,umkay/zulip,grave-w-grave/zulip,eeshangarg/zulip,blaze225/zulip,gigawhitlocks/zulip,zofuthan/zulip,wangdeshui/zulip,arpith/zulip,natanovia/zulip,zulip/zulip,tbutter/zulip,gigawhitlocks/zulip,vaidap/zulip,thomasboyt/zulip,lfranchi/zulip,atomic-labs/zulip,gkotian/zulip,joyhchen/zulip,blaze225/zulip,kaiyuanheshang/zulip,SmartPeople/zulip,cosmicAsymmetry/zulip,tdr130/zulip,Juanvulcano/zulip,vabs22/zulip,grave-w-grave/zulip,hengqujushi/zulip,huangkebo/zulip,tiansiyuan/zulip,souravbadami/zulip,peiwei/zulip,Cheppers/zulip,andersk/zulip,levixie/zulip,jeffcao/zulip,vakila/zulip,amallia/zulip,RobotCaleb/zulip,isht3/zulip,Suninus/zulip,brainwane/zulip,amallia/zulip,dwrpayne/zulip,jphilipsen05/zulip,Frouk/zulip,bastianh/zulip,zachallaun/zulip,ahmadassaf/zulip,bowlofstew/zulip,karamcnair/zulip,Jianchun1/zulip,codeKonami/zulip,adnanh/zulip,gkotian/zulip,Drooids/zulip,bastianh/zulip,proliming/zulip,saitodisse/zulip,zachallaun/zulip,sup95/zulip,TigorC/zulip,susansls/zulip,joshisa/zulip,souravbadami/zulip,Juanvulcano/zulip,guiquanz/zulip,MayB/zulip,akuseru/zulip,DazWorrall/zulip,krtkmj/zulip,levixie/zulip,zorojean/zulip,themass/zulip,dxq-git/zulip,rishig/zulip,mohsenSy/zulip,zhaoweigg/zulip,AZtheAsian/zulip,dhcrzf/zulip,ikasumiwt/zulip,mdavid/zulip,adnanh/zulip,Cheppers/zulip,fw1121/zulip,so0k/zulip,themass/zulip,praveenaki/zulip,moria/zulip,mahim97/zulip,tommyip/zulip,vabs22/zulip,shrikrishnaholla/zulip,PaulPetring/zulip,codeKonami/zulip,codeKonami/zulip,lfranchi/zulip,timabbott/zulip,dhcrzf/zulip,timabbott/zulip,thomasboyt/zulip,zwily/zulip,dnmfarrell/zulip,natanovia/zulip,gkotian/zulip,ashwinirudrappa/zulip,joshisa/zulip,luyifan/zulip,ericzhou2008/zulip,MariaFaBella85/zulip,esander91/zulip,ufosky-server/zulip,wangdeshui/zulip,joshisa/zulip,fw1121/zulip,ipernet/zulip,udxxabp/zulip,sup95/zulip,JanzTam/zulip,timabbott/zulip,luyifan/zulip,kou/zulip,ApsOps/zulip,calvinleenyc/zulip,swinghu/zulip,yocome/zulip,aps-sids/zulip,hj3938/zulip,Juanvulcano/zulip,developerfm/zulip,bastianh/zulip,SmartPeople/zulip,suxinde2009/zulip,qq1012803704/zulip,JPJPJPOPOP/zulip,johnny9/zulip,Jianchun1/zulip,Gabriel0402/zulip,jonesgithub/zulip,niftynei/zulip,gigawhitlocks/zulip,akuseru/zulip,yocome/zulip,Drooids/zulip,jainayush975/zulip,Drooids/zulip,armooo/zulip,aakash-cr7/zulip,Cheppers/zulip,armooo/zulip,kokoar/zulip,avastu/zulip,brockwhittaker/zulip,reyha/zulip,Cheppers/zulip,willingc/zulip,jackrzhang/zulip,jeffcao/zulip,Vallher/zulip,swinghu/zulip,hustlzp/zulip,Qgap/zulip,ipernet/zulip,littledogboy/zulip,eeshangarg/zulip,amyliu345/zulip,mansilladev/zulip,dotcool/zulip,synicalsyntax/zulip,kaiyuanheshang/zulip,wavelets/zulip,mdavid/zulip,brockwhittaker/zulip,voidException/zulip,paxapy/zulip,jessedhillon/zulip,kokoar/zulip,kaiyuanheshang/zulip,isht3/zulip,suxinde2009/zulip,hj3938/zulip,paxapy/zulip,udxxabp/zulip,deer-hope/zulip,zachallaun/zulip,atomic-labs/zulip,jessedhillon/zulip,bitemyapp/zulip,zulip/zulip,qq1012803704/zulip,thomasboyt/zulip,m1ssou/zulip,LAndreas/zulip,glovebx/zulip,Qgap/zulip,arpith/zulip,aliceriot/zulip,Gabriel0402/zulip,hafeez3000/zulip,he15his/zulip,j831/zulip,thomasboyt/zulip,stamhe/zulip,xuxiao/zulip,karamcnair/zulip,jainayush975/zulip,shubhamdhama/zulip,mdavid/zulip,LAndreas/zulip,verma-varsha/zulip,mohsenSy/zulip,amyliu345/zulip,RobotCaleb/zulip,vakila/zulip,jimmy54/zulip,dxq-git/zulip,voidException/zulip,arpith/zulip,karamcnair/zulip,samatdav/zulip,hafeez3000/zulip,wangdeshui/zulip,aakash-cr7/zulip,saitodisse/zulip,alliejones/zulip,suxinde2009/zulip,Jianchun1/zulip,dhcrzf/zulip,akuseru/zulip,sup95/zulip,deer-hope/zulip,moria/zulip,eastlhu/zulip,tdr130/zulip,bssrdf/zulip,tbutter/zulip,PhilSk/zulip,punchagan/zulip,proliming/zulip,punchagan/zulip,sharmaeklavya2/zulip,jonesgithub/zulip,natanovia/zulip,eeshangarg/zulip,jessedhillon/zulip,souravbadami/zulip,jrowan/zulip,karamcnair/zulip,paxapy/zulip,qq1012803704/zulip,aps-sids/zulip,Jianchun1/zulip,sharmaeklavya2/zulip,vakila/zulip,karamcnair/zulip,technicalpickles/zulip,synicalsyntax/zulip,KingxBanana/zulip,kaiyuanheshang/zulip,SmartPeople/zulip,Batterfii/zulip,Galexrt/zulip,zofuthan/zulip,dhcrzf/zulip,zachallaun/zulip,nicholasbs/zulip,babbage/zulip,pradiptad/zulip,souravbadami/zulip,peiwei/zulip,huangkebo/zulip,adnanh/zulip,avastu/zulip,ikasumiwt/zulip,bowlofstew/zulip,aps-sids/zulip,gigawhitlocks/zulip,technicalpickles/zulip,brockwhittaker/zulip,technicalpickles/zulip,christi3k/zulip,tbutter/zulip,wangdeshui/zulip,vaidap/zulip,punchagan/zulip,schatt/zulip,glovebx/zulip,babbage/zulip,ryansnowboarder/zulip,eastlhu/zulip,willingc/zulip,qq1012803704/zulip,seapasulli/zulip,esander91/zulip,kokoar/zulip,aps-sids/zulip,thomasboyt/zulip,Qgap/zulip,yuvipanda/zulip,dxq-git/zulip,guiquanz/zulip,Cheppers/zulip,peguin40/zulip,PaulPetring/zulip,zwily/zulip,timabbott/zulip,dwrpayne/zulip,kou/zulip,mahim97/zulip,rht/zulip,itnihao/zulip,so0k/zulip,dxq-git/zulip,jonesgithub/zulip,aps-sids/zulip,m1ssou/zulip,jeffcao/zulip,hj3938/zulip,suxinde2009/zulip,avastu/zulip,developerfm/zulip,jainayush975/zulip,akuseru/zulip,ryanbackman/zulip,dotcool/zulip,sup95/zulip,wavelets/zulip,eastlhu/zulip,amallia/zulip,sonali0901/zulip,saitodisse/zulip,alliejones/zulip,jerryge/zulip,mdavid/zulip,mansilladev/zulip,jerryge/zulip,Suninus/zulip,zacps/zulip,jphilipsen05/zulip,showell/zulip,codeKonami/zulip,ufosky-server/zulip,nicholasbs/zulip,suxinde2009/zulip,littledogboy/zulip,rishig/zulip,jainayush975/zulip,andersk/zulip,sonali0901/zulip,krtkmj/zulip,zulip/zulip,ikasumiwt/zulip,isht3/zulip,littledogboy/zulip,bitemyapp/zulip,dawran6/zulip,Batterfii/zulip,LAndreas/zulip,firstblade/zulip,jerryge/zulip,sup95/zulip,blaze225/zulip,nicholasbs/zulip,tiansiyuan/zulip,jimmy54/zulip,bssrdf/zulip,xuanhan863/zulip,reyha/zulip,luyifan/zulip,zacps/zulip,levixie/zulip,vakila/zulip,xuanhan863/zulip,kou/zulip,yocome/zulip,EasonYi/zulip,pradiptad/zulip,vikas-parashar/zulip,bluesea/zulip,johnny9/zulip,noroot/zulip,jackrzhang/zulip,MariaFaBella85/zulip,KJin99/zulip,Vallher/zulip,hustlzp/zulip,ahmadassaf/zulip,easyfmxu/zulip,zacps/zulip,brainwane/zulip,ipernet/zulip,ikasumiwt/zulip,cosmicAsymmetry/zulip,showell/zulip,showell/zulip,Drooids/zulip,MariaFaBella85/zulip,he15his/zulip,aakash-cr7/zulip,Juanvulcano/zulip,dhcrzf/zulip,peiwei/zulip,jerryge/zulip,stamhe/zulip,vaidap/zulip,m1ssou/zulip,niftynei/zulip,pradiptad/zulip,fw1121/zulip,krtkmj/zulip,aliceriot/zulip,niftynei/zulip,amanharitsh123/zulip,krtkmj/zulip,cosmicAsymmetry/zulip,yuvipanda/zulip,adnanh/zulip,gigawhitlocks/zulip,shubhamdhama/zulip,developerfm/zulip,hafeez3000/zulip,amanharitsh123/zulip,zulip/zulip,bssrdf/zulip,eeshangarg/zulip,MariaFaBella85/zulip,JPJPJPOPOP/zulip,mahim97/zulip,lfranchi/zulip,zofuthan/zulip,bastianh/zulip,saitodisse/zulip,j831/zulip,KJin99/zulip,moria/zulip,Diptanshu8/zulip,noroot/zulip,blaze225/zulip,dxq-git/zulip,Juanvulcano/zulip,ahmadassaf/zulip,nicholasbs/zulip,wangdeshui/zulip,hafeez3000/zulip,wdaher/zulip,zwily/zulip,proliming/zulip,dxq-git/zulip,dattatreya303/zulip,grave-w-grave/zulip,shrikrishnaholla/zulip,babbage/zulip,susansls/zulip,mansilladev/zulip,verma-varsha/zulip,ryansnowboarder/zulip,Suninus/zulip,JPJPJPOPOP/zulip,eastlhu/zulip,Vallher/zulip,shubhamdhama/zulip,dwrpayne/zulip,shrikrishnaholla/zulip,yocome/zulip,ashwinirudrappa/zulip,natanovia/zulip,AZtheAsian/zulip,thomasboyt/zulip,JanzTam/zulip,dattatreya303/zulip,shaunstanislaus/zulip,Gabriel0402/zulip,Diptanshu8/zulip,itnihao/zulip,johnnygaddarr/zulip
#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ [(os.path.join('share/humbug/', relpath), glob.glob(os.path.join(relpath, '*'))) for relpath in glob.glob("integrations/*")] + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], ) Include folders with subfolders when creating api tarball (imported from commit b9d564a6cc4ee6e2afa0108b6d9f18af039fc8cf)
#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup def recur_expand(target_root, dir): for root, _, files in os.walk(dir): paths = [os.path.join(root, f) for f in files] if len(paths): yield os.path.join(target_root, root), paths setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ list(recur_expand('share/humbug', 'integrations/')) + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ [(os.path.join('share/humbug/', relpath), glob.glob(os.path.join(relpath, '*'))) for relpath in glob.glob("integrations/*")] + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], ) <commit_msg>Include folders with subfolders when creating api tarball (imported from commit b9d564a6cc4ee6e2afa0108b6d9f18af039fc8cf)<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup def recur_expand(target_root, dir): for root, _, files in os.walk(dir): paths = [os.path.join(root, f) for f in files] if len(paths): yield os.path.join(target_root, root), paths setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ list(recur_expand('share/humbug', 'integrations/')) + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], )
#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ [(os.path.join('share/humbug/', relpath), glob.glob(os.path.join(relpath, '*'))) for relpath in glob.glob("integrations/*")] + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], ) Include folders with subfolders when creating api tarball (imported from commit b9d564a6cc4ee6e2afa0108b6d9f18af039fc8cf)#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup def recur_expand(target_root, dir): for root, _, files in os.walk(dir): paths = [os.path.join(root, f) for f in files] if len(paths): yield os.path.join(target_root, root), paths setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ list(recur_expand('share/humbug', 'integrations/')) + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ [(os.path.join('share/humbug/', relpath), glob.glob(os.path.join(relpath, '*'))) for relpath in glob.glob("integrations/*")] + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], ) <commit_msg>Include folders with subfolders when creating api tarball (imported from commit b9d564a6cc4ee6e2afa0108b6d9f18af039fc8cf)<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import humbug import glob import os from distutils.core import setup def recur_expand(target_root, dir): for root, _, files in os.walk(dir): paths = [os.path.join(root, f) for f in files] if len(paths): yield os.path.join(target_root, root), paths setup(name='humbug', version=humbug.__version__, description='Bindings for the Humbug message API', author='Humbug, Inc.', author_email='humbug@humbughq.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Communications :: Chat', ], url='https://humbughq.com/dist/api/', packages=['humbug'], data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \ list(recur_expand('share/humbug', 'integrations/')) + \ [('share/humbug/demos', [os.path.join("demos", relpath) for relpath in os.listdir("demos")])], scripts=["bin/humbug-send"], )
b22e96cc1e5daded4841b39d31ebefd1df86f26a
corehq/apps/hqadmin/migrations/0017_hqdeploy_commit.py
corehq/apps/hqadmin/migrations/0017_hqdeploy_commit.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-07-21 13:42 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ]
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ]
Remove unnecessary imports based on review
Remove unnecessary imports based on review
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-07-21 13:42 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ] Remove unnecessary imports based on review
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-07-21 13:42 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ] <commit_msg>Remove unnecessary imports based on review<commit_after>
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-07-21 13:42 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ] Remove unnecessary imports based on reviewfrom django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-07-21 13:42 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ] <commit_msg>Remove unnecessary imports based on review<commit_after>from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hqadmin', '0016_hqdeploy_ordering'), ] operations = [ migrations.AddField( model_name='hqdeploy', name='commit', field=models.CharField(max_length=255, null=True), ), ]
50c6e4a44a2451970c8e6286e1acb74dad78365c
example.py
example.py
import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ print(args) self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect()
import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect()
Switch off debugging func that got through...
Switch off debugging func that got through...
Python
mit
sarenji/pyrc
import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ print(args) self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect() Switch off debugging func that got through...
import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect()
<commit_before>import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ print(args) self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect() <commit_msg>Switch off debugging func that got through...<commit_after>
import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect()
import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ print(args) self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect() Switch off debugging func that got through...import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect()
<commit_before>import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ print(args) self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect() <commit_msg>Switch off debugging func that got through...<commit_after>import pyrc import pyrc.utils.hooks as hooks class GangstaBot(pyrc.Bot): @hooks.command() def bling(self, channel, sender): "will print yo" self.message(channel, "%s: yo" % sender) @hooks.command("^repeat\s+(?P<msg>.+)$") def repeat(self, channel, sender, **kwargs): "will repeat whatever yo say" self.message(channel, "%s: %s" % (sender, kwargs["msg"])) @hooks.privmsg("(lol|lmao|rofl(mao)?)") def stopword(self, channel, sender, *args): """ will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message """ self.message(channel, args[0]) @hooks.interval(10000) def keeprepeating(self): "will say something" self.message("#turntechgodhead", "stop repeating myself") if __name__ == '__main__': bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead']) bot.connect()
8f4988f5a0873dc818b8ad2c7de3e0f71d544cde
bayohwoolph.py
bayohwoolph.py
#!/usr/bin/python3 import asyncio import configparser import discord import os from discord.ext import commands # Parse the config and stick in global "config" var config = configparser.ConfigParser() for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']: if os.path.isfile(inifile): config.read(inifile) break # First config file wins MAIN = config['MAIN'] description = '''Dark Echo's barkeep''' bot = commands.Bot(command_prefix='%', description=description) @bot.event @asyncio.coroutine def on_ready(): print('Logged in as') print(bot.user.name) print(bot.user.id) print('------') bot.run(MAIN.get('login_token'))
#!/usr/bin/python3 import asyncio import configparser import discord import os from discord.ext import commands # Parse the config and stick in global "config" var config = configparser.ConfigParser() for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']: if os.path.isfile(inifile): config.read(inifile) break # First config file wins MAIN = config['MAIN'] description = '''Dark Echo's barkeep''' bot = commands.Bot(command_prefix='$', description=description) @bot.event @asyncio.coroutine def on_ready(): print('Logged in as') print(bot.user.name) print(bot.user.id) print('------') bot.run(MAIN.get('login_token'))
Change command prefix to $, so that's like a tip.
Change command prefix to $, so that's like a tip.
Python
agpl-3.0
freiheit/Bay-Oh-Woolph,dark-echo/Bay-Oh-Woolph
#!/usr/bin/python3 import asyncio import configparser import discord import os from discord.ext import commands # Parse the config and stick in global "config" var config = configparser.ConfigParser() for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']: if os.path.isfile(inifile): config.read(inifile) break # First config file wins MAIN = config['MAIN'] description = '''Dark Echo's barkeep''' bot = commands.Bot(command_prefix='%', description=description) @bot.event @asyncio.coroutine def on_ready(): print('Logged in as') print(bot.user.name) print(bot.user.id) print('------') bot.run(MAIN.get('login_token')) Change command prefix to $, so that's like a tip.
#!/usr/bin/python3 import asyncio import configparser import discord import os from discord.ext import commands # Parse the config and stick in global "config" var config = configparser.ConfigParser() for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']: if os.path.isfile(inifile): config.read(inifile) break # First config file wins MAIN = config['MAIN'] description = '''Dark Echo's barkeep''' bot = commands.Bot(command_prefix='$', description=description) @bot.event @asyncio.coroutine def on_ready(): print('Logged in as') print(bot.user.name) print(bot.user.id) print('------') bot.run(MAIN.get('login_token'))
<commit_before>#!/usr/bin/python3 import asyncio import configparser import discord import os from discord.ext import commands # Parse the config and stick in global "config" var config = configparser.ConfigParser() for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']: if os.path.isfile(inifile): config.read(inifile) break # First config file wins MAIN = config['MAIN'] description = '''Dark Echo's barkeep''' bot = commands.Bot(command_prefix='%', description=description) @bot.event @asyncio.coroutine def on_ready(): print('Logged in as') print(bot.user.name) print(bot.user.id) print('------') bot.run(MAIN.get('login_token')) <commit_msg>Change command prefix to $, so that's like a tip.<commit_after>
#!/usr/bin/python3

import asyncio
import configparser
import discord
import os
from discord.ext import commands

# Parse the config and stick in global "config" var
config = configparser.ConfigParser()
for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']:
    if os.path.isfile(inifile):
        config.read(inifile)
        break # First config file wins

MAIN = config['MAIN']

description = '''Dark Echo's barkeep'''

bot = commands.Bot(command_prefix='$', description=description)

@bot.event
@asyncio.coroutine
def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')

bot.run(MAIN.get('login_token'))
#!/usr/bin/python3

import asyncio
import configparser
import discord
import os
from discord.ext import commands

# Parse the config and stick in global "config" var
config = configparser.ConfigParser()
for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']:
    if os.path.isfile(inifile):
        config.read(inifile)
        break # First config file wins

MAIN = config['MAIN']

description = '''Dark Echo's barkeep'''

bot = commands.Bot(command_prefix='%', description=description)

@bot.event
@asyncio.coroutine
def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')

bot.run(MAIN.get('login_token'))

Change command prefix to $, so that's like a tip.#!/usr/bin/python3

import asyncio
import configparser
import discord
import os
from discord.ext import commands

# Parse the config and stick in global "config" var
config = configparser.ConfigParser()
for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']:
    if os.path.isfile(inifile):
        config.read(inifile)
        break # First config file wins

MAIN = config['MAIN']

description = '''Dark Echo's barkeep'''

bot = commands.Bot(command_prefix='$', description=description)

@bot.event
@asyncio.coroutine
def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')

bot.run(MAIN.get('login_token'))
<commit_before>#!/usr/bin/python3

import asyncio
import configparser
import discord
import os
from discord.ext import commands

# Parse the config and stick in global "config" var
config = configparser.ConfigParser()
for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']:
    if os.path.isfile(inifile):
        config.read(inifile)
        break # First config file wins

MAIN = config['MAIN']

description = '''Dark Echo's barkeep'''

bot = commands.Bot(command_prefix='%', description=description)

@bot.event
@asyncio.coroutine
def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')

bot.run(MAIN.get('login_token'))
<commit_msg>Change command prefix to $, so that's like a tip.<commit_after>#!/usr/bin/python3

import asyncio
import configparser
import discord
import os
from discord.ext import commands

# Parse the config and stick in global "config" var
config = configparser.ConfigParser()
for inifile in [os.path.expanduser('~')+'/.bayohwoolph.ini','bayohwoolph.local.ini','bayohwoolph.ini']:
    if os.path.isfile(inifile):
        config.read(inifile)
        break # First config file wins

MAIN = config['MAIN']

description = '''Dark Echo's barkeep'''

bot = commands.Bot(command_prefix='$', description=description)

@bot.event
@asyncio.coroutine
def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')

bot.run(MAIN.get('login_token'))
4cad1d743f2c70c3ee046b59d98aecb6b5b301d6
src/event_manager/views/base.py
src/event_manager/views/base.py
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})


def register_user(request):
    pass
Create shell function for register_user
Create shell function for register_user
Python
agpl-3.0
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})

Create shell function for register_user
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})


def register_user(request):
    pass
<commit_before>from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})
<commit_msg>Create shell function for register_user<commit_after>
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})


def register_user(request):
    pass
from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})

Create shell function for register_userfrom django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})


def register_user(request):
    pass
<commit_before>from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})
<commit_msg>Create shell function for register_user<commit_after>from django.shortcuts import render, redirect
from django.http import *
from django.contrib.auth import authenticate, login


def home(request):
    return render(request, 'login.html', {})


def login_user(request):
    logout(request)
    username = ""
    password = ""

    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')

        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/e/')
    return render(request, 'login.html', {})


def register_user(request):
    pass
6abd349fa0392cd5518d3f01942289ae0527d8f4
__init__.py
__init__.py
# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from nffg import NFFG, NFFGToolBox

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from networkx.release import get_info, major as nx_major

# Enabled imports directly from nffg_lib package
from nffg import NFFG, NFFGToolBox

if int(nx_major) > 1:
  raise RuntimeError(
    "NetworkX version(<2.0): %s is not supported!" % get_info()[2])

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
Add NetworkX version checking to nffg_lib package
Add NetworkX version checking to nffg_lib package
Python
apache-2.0
hsnlab/nffg,5GExchange/nffg
# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from nffg import NFFG, NFFGToolBox

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]

Add NetworkX version checking to nffg_lib package
# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from networkx.release import get_info, major as nx_major

# Enabled imports directly from nffg_lib package
from nffg import NFFG, NFFGToolBox

if int(nx_major) > 1:
  raise RuntimeError(
    "NetworkX version(<2.0): %s is not supported!" % get_info()[2])

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
<commit_before># Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from nffg import NFFG, NFFGToolBox

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
<commit_msg>Add NetworkX version checking to nffg_lib package<commit_after>
# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from networkx.release import get_info, major as nx_major

# Enabled imports directly from nffg_lib package
from nffg import NFFG, NFFGToolBox

if int(nx_major) > 1:
  raise RuntimeError(
    "NetworkX version(<2.0): %s is not supported!" % get_info()[2])

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from nffg import NFFG, NFFGToolBox

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]

Add NetworkX version checking to nffg_lib package# Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from networkx.release import get_info, major as nx_major

# Enabled imports directly from nffg_lib package
from nffg import NFFG, NFFGToolBox

if int(nx_major) > 1:
  raise RuntimeError(
    "NetworkX version(<2.0): %s is not supported!" % get_info()[2])

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
<commit_before># Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from nffg import NFFG, NFFGToolBox

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
<commit_msg>Add NetworkX version checking to nffg_lib package<commit_after># Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal graph-based implementation of Network Function Forwarding Graph
"""
from networkx.release import get_info, major as nx_major

# Enabled imports directly from nffg_lib package
from nffg import NFFG, NFFGToolBox

if int(nx_major) > 1:
  raise RuntimeError(
    "NetworkX version(<2.0): %s is not supported!" % get_info()[2])

__version__ = nffg.VERSION
__all__ = ["NFFG", "NFFGToolBox"]
7f7e606cc15e24190880d7388d07623be783a384
src/address_extractor/__init__.py
src/address_extractor/__init__.py
from .__main__ import main

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'address_extractor'
]
from .__main__ import main
from .__main__ import parsed_address_to_human

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'main',
    'parsed_address_to_human'
]
Change importing structure in init
Change importing structure in init
Python
mit
scolby33/address_extractor
from .__main__ import main

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'address_extractor'
]

Change importing structure in init
from .__main__ import main
from .__main__ import parsed_address_to_human

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'main',
    'parsed_address_to_human'
]
<commit_before>from .__main__ import main

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'address_extractor'
]
<commit_msg>Change importing structure in init<commit_after>
from .__main__ import main
from .__main__ import parsed_address_to_human

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'main',
    'parsed_address_to_human'
]
from .__main__ import main

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'address_extractor'
]

Change importing structure in initfrom .__main__ import main
from .__main__ import parsed_address_to_human

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'main',
    'parsed_address_to_human'
]
<commit_before>from .__main__ import main

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'address_extractor'
]
<commit_msg>Change importing structure in init<commit_after>from .__main__ import main
from .__main__ import parsed_address_to_human

__version__ = '1.0.0'
__title__ = 'address_extractor'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = ''
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2015 Scott Colby'

__all__ = [
    'main',
    'parsed_address_to_human'
]
88776309a601e67c34747bd2eae49452006be017
zsh/zsh_concat.py
zsh/zsh_concat.py
#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)

        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)


if __name__ == "__main__":
    main(argv)
#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)

        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)


if __name__ == "__main__":
    main(argv)
Read lib dir, before local dir.
Read lib dir, before local dir.
Python
mit
skk/dotfiles,skk/dotfiles
#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)

        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)


if __name__ == "__main__":
    main(argv)

Read lib dir, before local dir.
#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)

        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)


if __name__ == "__main__":
    main(argv)
<commit_before>#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)

        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)


if __name__ == "__main__":
    main(argv)
<commit_msg>Read lib dir, before local dir.<commit_after>
#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)

        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)


if __name__ == "__main__":
    main(argv)
#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)

        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)


if __name__ == "__main__":
    main(argv)

Read lib dir, before local dir.#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)

        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)


if __name__ == "__main__":
    main(argv)
<commit_before>#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)

        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)


if __name__ == "__main__":
    main(argv)
<commit_msg>Read lib dir, before local dir.<commit_after>#!/usr/bin/env python3
from os import scandir
from sys import argv
from platform import uname
from pathlib import Path

filename_template = """
# -------------------------------------------------------------------------------
# filename: {filename}
# -------------------------------------------------------------------------------
{data}
# -------------------------------------------------------------------------------
# END
# -------------------------------------------------------------------------------
"""


def read_and_format_data(filename, outbuf):
    """ Read file and format
    Args:
        filename:

    Returns: str

    """
    with open(filename, 'r') as inbuf:
        data = inbuf.read()
        data = filename_template.format(filename=filename, data=data)
        outbuf.write(data)


def main(args):
    parent_dir = Path(args[0]).parent
    lib_dir = parent_dir.joinpath('lib')

    hostname = uname()[1]
    local_dir = parent_dir.joinpath('local')

    outfilename = parent_dir.joinpath("zsh_plugins.zsh")

    with open(str(outfilename), 'w') as outbuf:
        for filename in scandir(str(local_dir)):
            filename = Path(filename.path)
            if filename.stem == hostname:
                read_and_format_data(str(filename), outbuf)

        for filename in scandir(str(lib_dir)):
            read_and_format_data(filename.path, outbuf)


if __name__ == "__main__":
    main(argv)
b0df06a29d4a235de86e51f4c6ff860fe5495d12
run-tests.py
run-tests.py
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
Test runner: fix line endings, print to stderr
Test runner: fix line endings, print to stderr
Python
mit
divtxt/binder
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")

Test runner: fix line endings, print to stderr
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
<commit_before>
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
<commit_msg>Test runner: fix line endings, print to stderr<commit_after>
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")

Test runner: fix line endings, print to stderr

import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
<commit_before>
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
<commit_msg>Test runner: fix line endings, print to stderr<commit_after>
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)

if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
f1da9bc9aae253779121f2b844e684c4ea4dd15f
seeker/migrations/0001_initial.py
seeker/migrations/0001_initial.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
Add related_name to initial migration so it doesn't try to later
Add related_name to initial migration so it doesn't try to later
Python
bsd-2-clause
imsweb/django-seeker,imsweb/django-seeker
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]

Add related_name to initial migration so it doesn't try to later
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
<commit_msg>Add related_name to initial migration so it doesn't try to later<commit_after>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]

Add related_name to initial migration so it doesn't try to later# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
<commit_msg>Add related_name to initial migration so it doesn't try to later<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedSearch',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('url', models.CharField(max_length=200, db_index=True)),
                ('querystring', models.TextField(blank=True)),
                ('default', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('user', models.ForeignKey(related_name=b'seeker_searches', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': (b'name',),
                'verbose_name_plural': b'saved searches',
            },
            bases=(models.Model,),
        ),
    ]
d3b526c5079dc61d3bb8a80363c9448de07da331
fabfile.py
fabfile.py
from fabric.api import *

env.runtime = 'production'
env.hosts = ['chimera.ericholscher.com']

env.user = 'docs'
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'


def update_requirements():
    "Update requirements in the virtualenv."
    run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))


def push():
    "Push new code, but don't restart/reload."
    local('git push origin master')
    with cd(env.code_dir):
        run('git pull origin master')


def pull():
    "Pull new code"
    with cd(env.code_dir):
        run('git pull origin master')


def restart():
    "Restart (or just start) the server"
    env.user = "root"
    run("restart readthedocs-gunicorn")


def celery():
    "Restart (or just start) the server"
    env.user = "root"
    run("restart readthedocs-celery")


def migrate(project=None):
    if project:
        run('django-admin.py migrate %s' % project)
    else:
        run('django-admin.py migrate')
from fabric.api import *

env.runtime = 'production'
env.hosts = ['chimera.ericholscher.com']

env.user = 'docs'
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'


def push():
    "Push new code, but don't restart/reload."
    local('git push origin master')
    with cd(env.code_dir):
        run('git pull origin master')


def update_requirements():
    "Update requirements in the virtualenv."
    run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir))


def migrate(project=None):
    if project:
        run('django-admin.py migrate %s' % project)
    else:
        run('django-admin.py migrate')


def restart():
    "Restart (or just start) the server"
    env.user = "root"
    run("restart readthedocs-gunicorn")


def celery():
    "Restart (or just start) the server"
    env.user = "root"
    run("restart readthedocs-celery")


def pull():
    "Pull new code"
    with cd(env.code_dir):
        run('git pull origin master')


def full_deploy():
    push()
    update_requirements()
    migrate()
    restart()
    celery()
Make it easy to do a full deploy with fab
Make it easy to do a full deploy with fab
Python
mit
cgourlay/readthedocs.org,sunnyzwh/readthedocs.org,attakei/readthedocs-oauth,davidfischer/readthedocs.org,nikolas/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,johncosta/private-readthedocs.org,stevepiercy/readthedocs.org,mrshoki/readthedocs.org,michaelmcandrew/readthedocs.org,royalwang/readthedocs.org,cgourlay/readthedocs.org,tddv/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,raven47git/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,rtfd/readthedocs.org,CedarLogic/readthedocs.org,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,atsuyim/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,wijerasa/readthedocs.org,royalwang/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs.org,alex/readthedocs.org,nikolas/readthedocs.org,d0ugal/readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs.org,alex/readthedocs.org,GovReady/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,jerel/readthedocs.org,ojii/readthedocs.org,alex/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,mhils/readthedocs.org,sils1297/readthedocs.org,gjtorikian/readthedocs.org,titiushko/readthedocs.org,Carreau/readthedocs.org,dirn/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,takluyver/readthedocs.org,CedarLogic/readthedocs.org,GovReady/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,d0ugal/readthedocs.org,rtfd/readthedocs.org,hach-que/readthedocs.org,nyergler/pythonslides,royalwang/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,safwanrahman/readthedocs.org,singingwolfboy/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,nyergler/pythonslides,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,sils1297/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,tddv/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,wanghaven/readthedocs.org,KamranMackey/readthedocs.org,ojii/readthedocs.org,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,agjohnson/readthedocs.org,titiushko/readthedocs.org,singingwolfboy/readthedocs.org,takluyver/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,alex/readthedocs.org,hach-que/readthedocs.org,VishvajitP/readthedocs.org,clarkperkins/readthedocs.org,ojii/readthedocs.org,SteveViss/readthedocs.org,mrshoki/readthedocs.org,kenwang76/readthedocs.org,pombredanne/readthedocs.org,Tazer/readthedocs.org,techtonik/readthedocs.org,kenshinthebattosai/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,michaelmcandrew/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,gjtorikian/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,asampat3090/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,emawind84/readthedocs.org,emawind84/readthedocs.org,KamranMackey/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,VishvajitP/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,singingwolfboy/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,LukasBoersma/readthedocs.org,laplaceliu/readthedocs.org,michaelmcandrew/readthedocs.org,espdev/readthedocs.org,attakei/readthedocs-oauth,pombredanne/readthedocs.org,sils1297/readthedocs.org,asampat3090/readthedocs.org,raven47git/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') Make it easy to do a full deploy with fab
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def full_deploy(): push() update_requirements() migrate() restart() celery()
<commit_before>from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') <commit_msg>Make it easy to do a full deploy with fab<commit_after>
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def full_deploy(): push() update_requirements() migrate() restart() celery()
from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') Make it easy to do a full deploy with fabfrom fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def full_deploy(): push() update_requirements() migrate() restart() celery()
<commit_before>from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') <commit_msg>Make it easy to do a full deploy with fab<commit_after>from fabric.api import * env.runtime = 'production' env.hosts = ['chimera.ericholscher.com'] env.user = 'docs' env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org' env.virtualenv = '/home/docs/sites/readthedocs.org' env.rundir = '/home/docs/sites/readthedocs.org/run' def push(): "Push new code, but don't restart/reload." local('git push origin master') with cd(env.code_dir): run('git pull origin master') def update_requirements(): "Update requirements in the virtualenv." run("%s/bin/pip install -r %s/deploy_requirements.txt" % (env.virtualenv, env.code_dir)) def migrate(project=None): if project: run('django-admin.py migrate %s' % project) else: run('django-admin.py migrate') def restart(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-gunicorn") def celery(): "Restart (or just start) the server" env.user = "root" run("restart readthedocs-celery") def pull(): "Pull new code" with cd(env.code_dir): run('git pull origin master') def full_deploy(): push() update_requirements() migrate() restart() celery()
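For context, the full_deploy task above simply chains the existing push, update_requirements, migrate, restart and celery tasks. A minimal sketch of invoking it programmatically with Fabric 1.x's execute helper follows; the importable module name fabfile is an assumption, not taken from the record:

# Sketch only: assumes Fabric 1.x and that the fabfile above is importable as fabfile.
from fabric.api import execute

import fabfile

# Runs the chained deploy tasks against the hosts configured in fabfile's env.
execute(fabfile.full_deploy)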
bed66179633a86751a938c13b98f5b56c3c1cfc7
fabfile.py
fabfile.py
from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~')
from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \ graphviz') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~')
Add graphviz for converting dot to pdf
Add graphviz for converting dot to pdf
Python
unlicense
spanners/dotfiles
from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~') Add graphviz for converting dot to pdf
from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \ graphviz') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~')
<commit_before>from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~') <commit_msg>Add graphviz for converting dot to pdf<commit_after>
from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \ graphviz') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~')
from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~') Add graphviz for converting dot to pdffrom fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \ graphviz') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~')
<commit_before>from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~') <commit_msg>Add graphviz for converting dot to pdf<commit_after>from fabric.api import local vim_bundles = [ { 'git': 'git://github.com/fatih/vim-go.git', 'path': '~/.vim/bundle/vim-go' } ] def apt_get(): local('sudo apt-get update') local('sudo apt-get upgrade') # neovim instead of vim? local('sudo apt-get install zsh vim wget curl kitty suckless-tools \ xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \ keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \ graphviz') def oh_my_zsh(): local('curl -L http://install.ohmyz.sh | sh') local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc') local('chsh -s $(which shell)') def install_vim(): local('mkdir -p ~/.vim/autoload ~/.vim/bundle') local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim') for bundle in vim_bundles: local('git clone ' + bundle['git'] + ' ' + bundle['path']) local('cd ~') def update_vim(): for bundle in vim_bundles: local('cd ' + bundle['path'] + ' && git pull') local('cd ~')
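The graphviz package added above ships the dot tool, which converts .dot graph descriptions to PDF. A short sketch of driving it from Python; the file names are illustrative, not from the commit:

# Sketch only: requires the dot binary installed by the apt_get task above.
import subprocess

# -Tpdf selects PDF output; -o names the output file.
subprocess.run(['dot', '-Tpdf', 'graph.dot', '-o', 'graph.pdf'], check=True)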
e98b9e1da819c571e165c55e222a3aa5a20e709b
mrbelvedereci/build/apps.py
mrbelvedereci/build/apps.py
from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build'
from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' def ready(self): import mrbelvedereci.build.handlers
Include handlers in build app
Include handlers in build app
Python
bsd-3-clause
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' Include handlers in build app
from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' def ready(self): import mrbelvedereci.build.handlers
<commit_before>from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' <commit_msg>Include handlers in build app<commit_after>
from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' def ready(self): import mrbelvedereci.build.handlers
from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' Include handlers in build appfrom __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' def ready(self): import mrbelvedereci.build.handlers
<commit_before>from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' <commit_msg>Include handlers in build app<commit_after>from __future__ import unicode_literals from django.apps import AppConfig class BuildConfig(AppConfig): name = 'mrbelvedereci.build' def ready(self): import mrbelvedereci.build.handlers
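Importing a handlers module from AppConfig.ready() is the standard Django pattern for registering signal receivers at startup. A minimal sketch of what such a module might contain; the Build model and the handler body are assumptions, not taken from the repository:

# Hypothetical mrbelvedereci/build/handlers.py; model name and logic are illustrative.
from django.db.models.signals import post_save
from django.dispatch import receiver

from mrbelvedereci.build.models import Build

@receiver(post_save, sender=Build)
def build_saved(sender, instance, created, **kwargs):
    # React to newly created Build rows; the real project logic would go here.
    if created:
        pass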
0ed4b5e2831c649bfb7c31a3fe0716bb02e4a02a
api/settings/staging.py
api/settings/staging.py
from .docker import * INSTALLED_APPS.append('raven.contrib.django.raven_compat') ADMINS = ( ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamsupport@HMCTS.NET'), ) RAVEN_CONFIG = { 'dsn': os.environ["SENTRY_DSN"], 'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available") }
from .docker import * INSTALLED_APPS.append('raven.contrib.django.raven_compat') ADMINS = ( ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamdev@hmcts.net'), ) RAVEN_CONFIG = { 'dsn': os.environ["SENTRY_DSN"], 'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available") }
Use dev email for non-prod environments
Use dev email for non-prod environments
Python
mit
ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas
from .docker import *

INSTALLED_APPS.append('raven.contrib.django.raven_compat')

ADMINS = (
    ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamsupport@HMCTS.NET'),
)

RAVEN_CONFIG = {
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}

Use dev email for non-prod environments
from .docker import * INSTALLED_APPS.append('raven.contrib.django.raven_compat') ADMINS = ( ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamdev@hmcts.net'), ) RAVEN_CONFIG = { 'dsn': os.environ["SENTRY_DSN"], 'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available") }
<commit_before>from .docker import *

INSTALLED_APPS.append('raven.contrib.django.raven_compat')

ADMINS = (
    ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamsupport@HMCTS.NET'),
)

RAVEN_CONFIG = {
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}
<commit_msg>Use dev email for non-prod environments<commit_after>
from .docker import * INSTALLED_APPS.append('raven.contrib.django.raven_compat') ADMINS = ( ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamdev@hmcts.net'), ) RAVEN_CONFIG = { 'dsn': os.environ["SENTRY_DSN"], 'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available") }
from .docker import *

INSTALLED_APPS.append('raven.contrib.django.raven_compat')

ADMINS = (
    ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamsupport@HMCTS.NET'),
)

RAVEN_CONFIG = {
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}

Use dev email for non-prod environmentsfrom .docker import *

INSTALLED_APPS.append('raven.contrib.django.raven_compat')

ADMINS = (
    ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamdev@hmcts.net'),
)

RAVEN_CONFIG = {
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}
<commit_before>from .docker import *

INSTALLED_APPS.append('raven.contrib.django.raven_compat')

ADMINS = (
    ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamsupport@HMCTS.NET'),
)

RAVEN_CONFIG = {
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}
<commit_msg>Use dev email for non-prod environments<commit_after>from .docker import *

INSTALLED_APPS.append('raven.contrib.django.raven_compat')

ADMINS = (
    ('[STAGING] HMCTS Reform Sustaining Support', 'sustainingteamdev@hmcts.net'),
)

RAVEN_CONFIG = {
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}
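Django uses the ADMINS setting above as the recipient list for error reports, so the address change redirects those mails. A small sketch of exercising the routing by hand; the subject and body text are illustrative:

# Sketch only: requires configured Django settings; sends to settings.ADMINS.
from django.core.mail import mail_admins

mail_admins('Staging smoke test', 'If you received this, ADMINS routing works.')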
2009a4ef78261fc8dfe96df00321d3fb612f697e
fireplace/cards/wog/hunter.py
fireplace/cards/wog/hunter.py
from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") )
from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) class OG_216: "Infested Wolf" deathrattle = Summon(CONTROLLER, "OG_216a") * 2 class OG_309: "Princess Huhuran" play = Deathrattle(TARGET) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") )
Implement Infested Wolf and Princess Huhuran
Implement Infested Wolf and Princess Huhuran
Python
agpl-3.0
beheh/fireplace,jleclanche/fireplace,NightKev/fireplace
from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") ) Implement Infested Wolf and Princess Huhuran
from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) class OG_216: "Infested Wolf" deathrattle = Summon(CONTROLLER, "OG_216a") * 2 class OG_309: "Princess Huhuran" play = Deathrattle(TARGET) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") )
<commit_before>from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") ) <commit_msg>Implement Infested Wolf and Princess Huhuran<commit_after>
from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) class OG_216: "Infested Wolf" deathrattle = Summon(CONTROLLER, "OG_216a") * 2 class OG_309: "Princess Huhuran" play = Deathrattle(TARGET) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") )
from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") ) Implement Infested Wolf and Princess Huhuranfrom ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) class OG_216: "Infested Wolf" deathrattle = Summon(CONTROLLER, "OG_216a") * 2 class OG_309: "Princess Huhuran" play = Deathrattle(TARGET) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") )
<commit_before>from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") ) <commit_msg>Implement Infested Wolf and Princess Huhuran<commit_after>from ..utils import * ## # Minions class OG_179: "Fiery Bat" deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1) class OG_292: "Forlorn Stalker" play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e") OG_292e = buff(+1, +1) class OG_216: "Infested Wolf" deathrattle = Summon(CONTROLLER, "OG_216a") * 2 class OG_309: "Princess Huhuran" play = Deathrattle(TARGET) ## # Spells class OG_045: "Infest" play = Buff(FRIENDLY_MINIONS, "OG_045a") class OG_045a: "Nerubian Spores" deathrattle = Give(CONTROLLER, RandomBeast()) tags = {GameTag.DEATHRATTLE: True} class OG_061: "On the Hunt" play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t") class OG_211: "Call of the Wild" play = ( Summon(CONTROLLER, "NEW1_034"), Summon(CONTROLLER, "NEW1_033"), Summon(CONTROLLER, "NEW1_032") )
09c3c511687de8888180577fa66f4ca51f4bc237
taggit_autosuggest_select2/views.py
taggit_autosuggest_select2/views.py
from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
Remove deprecated django json shim
Remove deprecated django json shim
Python
mit
iris-edu/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2,iris-edu/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,iris-edu/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2
from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json') Remove deprecated django json shim
from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
<commit_before>from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json') <commit_msg>Remove deprecated django json shim<commit_after>
from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json') Remove deprecated django json shimfrom django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
<commit_before>from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json') <commit_msg>Remove deprecated django json shim<commit_after>from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
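The commit swaps the removed django.utils.simplejson shim for the stdlib json module, but the views still pass mimetype=, which later Django versions renamed to content_type=. On those versions JsonResponse is the idiomatic shortcut; a sketch of one view rewritten that way, not part of the original commit:

# Sketch for newer Django versions only.
from django.http import JsonResponse
from taggit.models import Tag

def list_all_tags(request):
    all_tags = Tag.objects.all().values_list('name', flat=True)
    # safe=False permits serializing a top-level list instead of a dict.
    return JsonResponse(list(all_tags), safe=False)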
16055821b0f43047db4f32f3a16335732b63aa85
Cython/__init__.py
Cython/__init__.py
__version__ = "0.13" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import *
__version__ = "0.13+" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import *
Change version number for dev branch.
Change version number for dev branch.
Python
apache-2.0
scoder/cython,slonik-az/cython,rguillebert/CythonCTypesBackend,encukou/cython,encukou/cython,hickford/cython,roxyboy/cython,fabianrost84/cython,mrGeen/cython,madjar/cython,rguillebert/CythonCTypesBackend,madjar/cython,encukou/cython,rguillebert/CythonCTypesBackend,encukou/cython,madjar/cython,c-blake/cython,JelleZijlstra/cython,larsmans/cython,andreasvc/cython,marscher/cython,cython/cython,rguillebert/CythonCTypesBackend,achernet/cython,mcanthony/cython,acrispin/cython,fabianrost84/cython,hpfem/cython,mcanthony/cython,c-blake/cython,marscher/cython,c-blake/cython,roxyboy/cython,madjar/cython,hhsprings/cython,ABcDexter/cython,acrispin/cython,ABcDexter/cython,slonik-az/cython,roxyboy/cython,dahebolangkuan/cython,da-woods/cython,cython/cython,scoder/cython,marscher/cython,encukou/cython,mcanthony/cython,hpfem/cython,larsmans/cython,hickford/cython,andreasvc/cython,slonik-az/cython,mcanthony/cython,c-blake/cython,fabianrost84/cython,hhsprings/cython,c-blake/cython,dahebolangkuan/cython,acrispin/cython,da-woods/cython,hhsprings/cython,JelleZijlstra/cython,achernet/cython,achernet/cython,dahebolangkuan/cython,slonik-az/cython,acrispin/cython,slonik-az/cython,marscher/cython,dahebolangkuan/cython,andreasvc/cython,larsmans/cython,JelleZijlstra/cython,roxyboy/cython,hhsprings/cython,mrGeen/cython,andreasvc/cython,mrGeen/cython,ABcDexter/cython,JelleZijlstra/cython,fabianrost84/cython,scoder/cython,fperez/cython,ABcDexter/cython,acrispin/cython,scoder/cython,ChristopherHogan/cython,achernet/cython,dahebolangkuan/cython,ABcDexter/cython,cython/cython,roxyboy/cython,mrGeen/cython,mrGeen/cython,da-woods/cython,marscher/cython,cython/cython,hpfem/cython,ChristopherHogan/cython,hickford/cython,mcanthony/cython,fabianrost84/cython,larsmans/cython,bzzzz/cython,fperez/cython,andreasvc/cython,bzzzz/cython,hpfem/cython,fperez/cython,fperez/cython,hickford/cython,bzzzz/cython,ChristopherHogan/cython,da-woods/cython,larsmans/cython,bzzzz/cython,madjar/cython,hhsprings/cython,hickford/cython,achernet/cython,fperez/cython,hpfem/cython,JelleZijlstra/cython
__version__ = "0.13" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import * Change version number for dev branch.
__version__ = "0.13+" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import *
<commit_before>__version__ = "0.13" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import * <commit_msg>Change version number for dev branch.<commit_after>
__version__ = "0.13+" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import *
__version__ = "0.13" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import * Change version number for dev branch.__version__ = "0.13+" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import *
<commit_before>__version__ = "0.13" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import * <commit_msg>Change version number for dev branch.<commit_after>__version__ = "0.13+" # Void cython.* directives (for case insensitive operating systems). from Cython.Shadow import *
a4264c610f33640ac773ca0b12912f3ad972d966
feedback/admin.py
feedback/admin.py
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin)
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
Add Admin action to feedbacks
Add Admin action to feedbacks
Python
mit
n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin) Add Admin action to feedbacks
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
<commit_before>from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin) <commit_msg>Add Admin action to feedbacks<commit_after>
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin) Add Admin action to feedbacksfrom django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
<commit_before>from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] admin.site.register(Feedback, FeedbackAdmin) <commit_msg>Add Admin action to feedbacks<commit_after>from django.contrib import admin # Register your models here. from .models import Feedback class FeedbackAdmin(admin.ModelAdmin): list_display = ('name', 'email', 'note', 'archive', 'public') list_filter = ['created'] search_fields = ['name', 'email', 'note', 'archive', 'public'] actions = ['to_archive'] def to_archive(self, request, queryset): queryset.update(archive=True) to_archive.short_description = "Markierte Einträge archivieren" admin.site.register(Feedback, FeedbackAdmin)
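The short_description attribute used above is the classic way to label an admin action; Django 3.2 added an equivalent decorator. A sketch of the same action in that style, behavior unchanged:

# Equivalent standalone action using the admin.action decorator (Django 3.2+).
from django.contrib import admin

@admin.action(description="Markierte Einträge archivieren")
def to_archive(modeladmin, request, queryset):
    queryset.update(archive=True)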
03f07ee52136d0794f581946718bd3ab7c53e22c
git-keeper-core/setup.py
git-keeper-core/setup.py
# setup.py for git-keeper-core from setuptools import setup setup( name='git-keeper-core', version='0.1.0', description='Core modules for git-keeper-client and git-keeper-server.', url='https://github.com/git-keeper/git-keeper', license='GPL 3', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Education', 'Operating System :: OS Independent', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python', 'Programming Language :: Python :: 3 :: Only', 'Natural Language :: English', 'Topic :: Education :: Testing', 'Topic :: Education' ], packages=['gkeepcore'], install_requires=['paramiko'], setup_requires=['pytest-runner'], tests_require=['pytest'], )
# setup.py for git-keeper-core from setuptools import setup setup( name='git-keeper-core', version='0.1.0', description='Core modules for git-keeper-client and git-keeper-server.', url='https://github.com/git-keeper/git-keeper', license='GPL 3', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Education', 'Operating System :: OS Independent', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python', 'Programming Language :: Python :: 3 :: Only', 'Natural Language :: English', 'Topic :: Education :: Testing', 'Topic :: Education' ], packages=['gkeepcore'], setup_requires=['pytest-runner'], tests_require=['pytest'], )
Remove paramiko requirement from core
Remove paramiko requirement from core
Python
agpl-3.0
git-keeper/git-keeper,git-keeper/git-keeper
# setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    install_requires=['paramiko'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)
Remove paramiko requirement from core

# setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)

<commit_before># setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    install_requires=['paramiko'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)
<commit_msg>Remove paramiko requirement from core<commit_after>

# setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)

# setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    install_requires=['paramiko'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)
Remove paramiko requirement from core
# setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)

<commit_before># setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    install_requires=['paramiko'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)
<commit_msg>Remove paramiko requirement from core<commit_after># setup.py for git-keeper-core

from setuptools import setup

setup(
    name='git-keeper-core',
    version='0.1.0',
    description='Core modules for git-keeper-client and git-keeper-server.',
    url='https://github.com/git-keeper/git-keeper',
    license='GPL 3',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Education',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Natural Language :: English',
        'Topic :: Education :: Testing',
        'Topic :: Education'
    ],
    packages=['gkeepcore'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
)
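A note on where the dependency goes: the subject only says that paramiko leaves the shared core package, so any consumer must now declare it itself. A hypothetical sketch of such a downstream setup.py follows; the package and module names here are invented for illustration and are not part of the dataset record.

# Hypothetical downstream setup.py, not taken from this dataset.
from setuptools import setup

setup(
    name='git-keeper-server',        # assumed consumer package name
    version='0.1.0',
    packages=['gkeepserver'],        # assumed module name
    install_requires=[
        'git-keeper-core',           # the now paramiko-free core
        'paramiko',                  # SSH dependency declared by the consumer
    ],
)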
b824e4c4a106d73c842a38758addde52d94e976a
ngrams_feature_extractor.py
ngrams_feature_extractor.py
import sklearn


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
import sklearn
from hdf5_getters import *
import os


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

for root, dirs, files in os.walk('data'):
    files = glob.glob(os.path.join(root, '*h5'))
    for f in files:
        train_pair = make_train_pair(f)
        titles.append(train_pair['title'])
        pitch_diff_list.append(train_pair['pitch_diffs'])

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
Add corresponding hdf5 parsing file
Add corresponding hdf5 parsing file
Python
mit
ajnam12/MusicNLP
import sklearn


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
Add corresponding hdf5 parsing file

import sklearn
from hdf5_getters import *
import os


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

for root, dirs, files in os.walk('data'):
    files = glob.glob(os.path.join(root, '*h5'))
    for f in files:
        train_pair = make_train_pair(f)
        titles.append(train_pair['title'])
        pitch_diff_list.append(train_pair['pitch_diffs'])

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction

<commit_before>import sklearn


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
<commit_msg>Add corresponding hdf5 parsing file<commit_after>

import sklearn
from hdf5_getters import *
import os


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

for root, dirs, files in os.walk('data'):
    files = glob.glob(os.path.join(root, '*h5'))
    for f in files:
        train_pair = make_train_pair(f)
        titles.append(train_pair['title'])
        pitch_diff_list.append(train_pair['pitch_diffs'])

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction

import sklearn


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
Add corresponding hdf5 parsing file
import sklearn
from hdf5_getters import *
import os


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

for root, dirs, files in os.walk('data'):
    files = glob.glob(os.path.join(root, '*h5'))
    for f in files:
        train_pair = make_train_pair(f)
        titles.append(train_pair['title'])
        pitch_diff_list.append(train_pair['pitch_diffs'])

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction

<commit_before>import sklearn


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
<commit_msg>Add corresponding hdf5 parsing file<commit_after>import sklearn
from hdf5_getters import *
import os


def make_train_pair(filename):
    h5 = open_h5_file_read(filename)
    title = get_title(h5)
    pitches = get_segments_pitches(h5)[:11]  # limit: only look at beginning
    pitch_diffs = [pitches[i] - pitches[i - 1] for i in xrange(1, len(pitches))]
    h5.close()
    return {'title': title, 'pitch_diffs': pitch_diffs}

for root, dirs, files in os.walk('data'):
    files = glob.glob(os.path.join(root, '*h5'))
    for f in files:
        train_pair = make_train_pair(f)
        titles.append(train_pair['title'])
        pitch_diff_list.append(train_pair['pitch_diffs'])

# some lines omitted

neigh = NearestNeighbors(n_neighbors=1)  # predict the closest song
# a title list is also maintained
neigh.fit([sum(diff) for diff in pitch_diff_list[5000:]])
neigh.kneighbors(sum(pitch_diff_list[2029]))  # example prediction
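As recorded, the new_contents above would not run on its own: glob is used without being imported, and titles and pitch_diff_list are appended to before being defined. A minimal self-contained sketch of the same walk loop, assuming only that the Million Song Dataset's hdf5_getters helpers are importable:

import glob
import os

from hdf5_getters import open_h5_file_read, get_title, get_segments_pitches

titles = []
pitch_diff_list = []

for root, dirs, files in os.walk('data'):
    for f in glob.glob(os.path.join(root, '*h5')):
        h5 = open_h5_file_read(f)
        titles.append(get_title(h5))
        # limit: only look at the beginning, as in the original
        pitches = get_segments_pitches(h5)[:11]
        pitch_diff_list.append(
            [pitches[i] - pitches[i - 1] for i in range(1, len(pitches))])
        h5.close()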
444d97288c0fd80adf4077477336c98bfea140cc
node.py
node.py
class Node(object):
    def __init__(self):
        # Properties will go here!
class Node(object):
    def __init__(self):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # For each inbound Node here, add this Node as an outbound to that Node.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)
Initialize inbound and outbound Nodes of a Node
Initialize inbound and outbound Nodes of a Node
Python
mit
YabinHu/miniflow
class Node(object):
    def __init__(self):
        # Properties will go here!
Initialize inbound and outbound Nodes of a Node

class Node(object):
    def __init__(self):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # For each inbound Node here, add this Node as an outbound to that Node.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)

<commit_before>class Node(object):
    def __init__(self):
        # Properties will go here!
<commit_msg>Initialize inbound and outbound Nodes of a Node<commit_after>

class Node(object):
    def __init__(self):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # For each inbound Node here, add this Node as an outbound to that Node.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)

class Node(object):
    def __init__(self):
        # Properties will go here!
Initialize inbound and outbound Nodes of a Node
class Node(object):
    def __init__(self):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # For each inbound Node here, add this Node as an outbound to that Node.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)

<commit_before>class Node(object):
    def __init__(self):
        # Properties will go here!
<commit_msg>Initialize inbound and outbound Nodes of a Node<commit_after>class Node(object):
    def __init__(self):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # For each inbound Node here, add this Node as an outbound to that Node.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)
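Note that the committed new_contents is itself broken: __init__ receives no inbound_nodes argument, so instantiating Node raises NameError. A corrected sketch follows; the parameter is an assumption on my part, but it is the clear intent of the comment block.

class Node(object):
    def __init__(self, inbound_nodes=None):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes or []
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # Register this Node as an outbound Node of each of its inputs.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)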
6558af15ae2f694b3ea08174e238d3d4de811c95
warp10/src/main/python/callable.py
warp10/src/main/python/callable.py
#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        str = base64.b64decode(line)
        args = cPickle.loads(str)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        mystr = base64.b64decode(line)
        args = cPickle.loads(mystr)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
Change default python string name str to mystr as str is a reserved name
Change default python string name str to mystr as str is a reserved name
Python
apache-2.0
hbs/warp10-platform,cityzendata/warp10-platform,cityzendata/warp10-platform,StevenLeRoux/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,hbs/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform
#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        str = base64.b64decode(line)
        args = cPickle.loads(str)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
Change default python string name str to mystr as str is a reserved name

#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        mystr = base64.b64decode(line)
        args = cPickle.loads(mystr)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))

<commit_before>#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        str = base64.b64decode(line)
        args = cPickle.loads(str)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
<commit_msg>Change default python string name str to mystr as str is a reserved name<commit_after>

#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        mystr = base64.b64decode(line)
        args = cPickle.loads(mystr)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))

#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        str = base64.b64decode(line)
        args = cPickle.loads(str)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
Change default python string name str to mystr as str is a reserved name
#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        mystr = base64.b64decode(line)
        args = cPickle.loads(mystr)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))

<commit_before>#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        str = base64.b64decode(line)
        args = cPickle.loads(str)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
<commit_msg>Change default python string name str to mystr as str is a reserved name<commit_after>#!/usr/bin/env python -u

import cPickle
import sys
import urllib
import base64

#
# Output the maximum number of instances of this 'callable' to spawn
# The absolute maximum is set in the configuration file via 'warpscript.call.maxcapacity'
#

print 10

#
# Loop, reading stdin, doing our stuff and outputing to stdout
#

while True:
    try:
        #
        # Read input. 'CALL' will transmit a single string argument from the stack, URL encoding it before transmission.
        # The 'callable' should describe how its input is to be formatted.
        # For python callable, we recommend base64 encoded pickle content (generated via ->PICKLE).
        #
        line = sys.stdin.readline()
        line = line.strip()
        line = urllib.unquote(line.decode('utf-8'))

        # Remove Base64 encoding
        mystr = base64.b64decode(line)
        args = cPickle.loads(mystr)

        #
        # Do out stuff
        #
        output = 'output'

        #
        # Output result (URL encoded UTF-8).
        #
        print urllib.quote(output.encode('utf-8'))
    except Exception as err:
        #
        # If returning a content starting with ' ' (not URL encoded), then
        # the rest of the line is interpreted as a URL encoded UTF-8 of an error message
        # and will propagate the error to the calling WarpScript
        #
        print ' ' + urllib.quote(repr(err).encode('utf-8'))
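Strictly speaking str is a builtin rather than a reserved name, so the old code did run; the real hazard is that rebinding it shadows the builtin for the rest of the scope. A small demonstration of why the rename matters:

str = "decoded payload"    # legal, but shadows the builtin
try:
    str(42)                # now fails: 'str' object is not callable
except TypeError as err:
    print(err)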
cc06a15f734a6ed46561a99d1040a08582833a09
src/puzzle/heuristics/acrostic.py
src/puzzle/heuristics/acrostic.py
from puzzle.heuristics.acrostics import _acrostic_iter


class Acrostic(_acrostic_iter.AcrosticIter):
    """Best available Acrostic solver."""
    pass
from puzzle.heuristics.acrostics import _acrostic_search


class Acrostic(_acrostic_search.AcrosticSearch):
    """Best available Acrostic solver."""
    pass
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
from puzzle.heuristics.acrostics import _acrostic_iter


class Acrostic(_acrostic_iter.AcrosticIter):
    """Best available Acrostic solver."""
    pass
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).

from puzzle.heuristics.acrostics import _acrostic_search


class Acrostic(_acrostic_search.AcrosticSearch):
    """Best available Acrostic solver."""
    pass

<commit_before>from puzzle.heuristics.acrostics import _acrostic_iter


class Acrostic(_acrostic_iter.AcrosticIter):
    """Best available Acrostic solver."""
    pass
<commit_msg>Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).<commit_after>

from puzzle.heuristics.acrostics import _acrostic_search


class Acrostic(_acrostic_search.AcrosticSearch):
    """Best available Acrostic solver."""
    pass

from puzzle.heuristics.acrostics import _acrostic_iter


class Acrostic(_acrostic_iter.AcrosticIter):
    """Best available Acrostic solver."""
    pass
Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).
from puzzle.heuristics.acrostics import _acrostic_search


class Acrostic(_acrostic_search.AcrosticSearch):
    """Best available Acrostic solver."""
    pass

<commit_before>from puzzle.heuristics.acrostics import _acrostic_iter


class Acrostic(_acrostic_iter.AcrosticIter):
    """Best available Acrostic solver."""
    pass
<commit_msg>Use AccrosticSearch (~BFS) instead of AcrosticIter (~DFS).<commit_after>from puzzle.heuristics.acrostics import _acrostic_search


class Acrostic(_acrostic_search.AcrosticSearch):
    """Best available Acrostic solver."""
    pass
932d4c19810c26f94b5a1729130f5d459db6831e
tests/pytests/integration/_logging/test_jid_logging.py
tests/pytests/integration/_logging/test_jid_logging.py
import logging

import salt.config
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
import logging

from salt._logging import DFLT_LOG_FMT_JID
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
Fix AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'
Fix AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

```
$ python3 -m pytest -ra tests/pytests/integration/_logging/test_jid_logging.py
_______________________________ test_jid_in_logs _______________________________

    @PRE_PYTEST_SKIP
    def test_jid_in_logs(caplog, salt_call_cli):
        """
        Test JID in log_format
        """
>       jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
E       AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

tests/pytests/integration/_logging/test_jid_logging.py:15: AttributeError
```

Commit a9d5e75b528b711412b8fa30eb61c54413a254e9 changed the private variable to a public one.

Signed-off-by: Benjamin Drung <d65d2fc4caaf46ce74f284673a90ae280f78d1b6@debian.org>
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
import logging

import salt.config
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
Fix AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

```
$ python3 -m pytest -ra tests/pytests/integration/_logging/test_jid_logging.py
_______________________________ test_jid_in_logs _______________________________

    @PRE_PYTEST_SKIP
    def test_jid_in_logs(caplog, salt_call_cli):
        """
        Test JID in log_format
        """
>       jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
E       AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

tests/pytests/integration/_logging/test_jid_logging.py:15: AttributeError
```

Commit a9d5e75b528b711412b8fa30eb61c54413a254e9 changed the private variable to a public one.

Signed-off-by: Benjamin Drung <d65d2fc4caaf46ce74f284673a90ae280f78d1b6@debian.org>

import logging

from salt._logging import DFLT_LOG_FMT_JID
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)

<commit_before>import logging

import salt.config
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
<commit_msg>Fix AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

```
$ python3 -m pytest -ra tests/pytests/integration/_logging/test_jid_logging.py
_______________________________ test_jid_in_logs _______________________________

    @PRE_PYTEST_SKIP
    def test_jid_in_logs(caplog, salt_call_cli):
        """
        Test JID in log_format
        """
>       jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
E       AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

tests/pytests/integration/_logging/test_jid_logging.py:15: AttributeError
```

Commit a9d5e75b528b711412b8fa30eb61c54413a254e9 changed the private variable to a public one.

Signed-off-by: Benjamin Drung <d65d2fc4caaf46ce74f284673a90ae280f78d1b6@debian.org><commit_after>

import logging

from salt._logging import DFLT_LOG_FMT_JID
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)

import logging

import salt.config
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
Fix AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

```
$ python3 -m pytest -ra tests/pytests/integration/_logging/test_jid_logging.py
_______________________________ test_jid_in_logs _______________________________

    @PRE_PYTEST_SKIP
    def test_jid_in_logs(caplog, salt_call_cli):
        """
        Test JID in log_format
        """
>       jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
E       AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

tests/pytests/integration/_logging/test_jid_logging.py:15: AttributeError
```

Commit a9d5e75b528b711412b8fa30eb61c54413a254e9 changed the private variable to a public one.

Signed-off-by: Benjamin Drung <d65d2fc4caaf46ce74f284673a90ae280f78d1b6@debian.org>
import logging

from salt._logging import DFLT_LOG_FMT_JID
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)

<commit_before>import logging

import salt.config
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
<commit_msg>Fix AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

```
$ python3 -m pytest -ra tests/pytests/integration/_logging/test_jid_logging.py
_______________________________ test_jid_in_logs _______________________________

    @PRE_PYTEST_SKIP
    def test_jid_in_logs(caplog, salt_call_cli):
        """
        Test JID in log_format
        """
>       jid_formatted_str = salt.config._DFLT_LOG_FMT_JID.split("%")[0]
E       AttributeError: module 'salt.config' has no attribute '_DFLT_LOG_FMT_JID'

tests/pytests/integration/_logging/test_jid_logging.py:15: AttributeError
```

Commit a9d5e75b528b711412b8fa30eb61c54413a254e9 changed the private variable to a public one.

Signed-off-by: Benjamin Drung <d65d2fc4caaf46ce74f284673a90ae280f78d1b6@debian.org><commit_after>import logging

from salt._logging import DFLT_LOG_FMT_JID
from tests.support.helpers import PRE_PYTEST_SKIP


# Using the PRE_PYTEST_SKIP decorator since this test still fails on some platforms.
# Will investigate later.
@PRE_PYTEST_SKIP
def test_jid_in_logs(caplog, salt_call_cli):
    """
    Test JID in log_format
    """
    jid_formatted_str = DFLT_LOG_FMT_JID.split("%")[0]
    formatter = logging.Formatter(fmt="%(jid)s %(message)s")
    with caplog.at_level(logging.DEBUG):
        previous_formatter = caplog.handler.formatter
        try:
            caplog.handler.setFormatter(formatter)
            ret = salt_call_cli.run("test.ping")
            assert ret.returncode == 0
            assert ret.data is True
            assert_error_msg = (
                "'{}' not found in log messages:\n>>>>>>>>>{}\n<<<<<<<<<".format(
                    jid_formatted_str, caplog.text
                )
            )
            assert jid_formatted_str in caplog.text, assert_error_msg
        finally:
            caplog.handler.setFormatter(previous_formatter)
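While a rename like this propagates, test code sometimes has to run against releases on both sides of it. A version-tolerant import sketch, assuming only that one of the two names recorded in this sample exists:

try:
    from salt._logging import DFLT_LOG_FMT_JID
except ImportError:  # older releases only expose the private name
    from salt.config import _DFLT_LOG_FMT_JID as DFLT_LOG_FMT_JID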
773064a61fcd4213196e44d347e304d746b28325
syft/frameworks/torch/tensors/native.py
syft/frameworks/torch/tensors/native.py
import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
Set local worker as default for SyftTensor owner
Set local worker as default for SyftTensor owner
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
Set local worker as default for SyftTensor owner

import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr

<commit_before>import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
<commit_msg>Set local worker as default for SyftTensor owner<commit_after>

import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr

import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
Set local worker as default for SyftTensor owner
import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr

<commit_before>import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
<commit_msg>Set local worker as default for SyftTensor owner<commit_after>import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to
    all Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner
        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )
        else:
            ptr = previous_pointer

        return ptr
debb03975e8b647f27980081371bd9fdad7b292f
solar/solar/system_log/operations.py
solar/solar/system_log/operations.py
from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(task_uuid, item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
Fix update of logitem bug in system_log
Fix update of logitem bug in system_log
Python
apache-2.0
loles/solar,openstack/solar,loles/solar,pigmej/solar,pigmej/solar,torgartor21/solar,torgartor21/solar,loles/solar,dshulyak/solar,zen/solar,zen/solar,dshulyak/solar,zen/solar,Mirantis/solar,Mirantis/solar,Mirantis/solar,pigmej/solar,Mirantis/solar,CGenie/solar,CGenie/solar,openstack/solar,zen/solar,loles/solar,openstack/solar
from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(task_uuid, item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
Fix update of logitem bug in system_log

from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data

<commit_before>from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(task_uuid, item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
<commit_msg>Fix update of logitem bug in system_log<commit_after>

from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data

from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(task_uuid, item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
Fix update of logitem bug in system_log
from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data

<commit_before>from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(task_uuid, item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
<commit_msg>Fix update of logitem bug in system_log<commit_after>from solar.system_log import data

from dictdiffer import patch


def set_error(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.get(task_uuid)
    if item:
        item.state = data.STATES.error
        sl.update(item)


def move_to_commited(task_uuid, *args, **kwargs):
    sl = data.SL()
    item = sl.pop(task_uuid)
    if item:
        commited = data.CD()
        staged_data = patch(item.diff, commited.get(item.res, {}))
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
        commited[item.res] = staged_data
e7e6274ee5fa16cb07e32bebe53532a6a16b7965
dagrevis_lv/blog/templatetags/tags.py
dagrevis_lv/blog/templatetags/tags.py
from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = (max_priority / 10.) * priority
    return "font-size: {}em;".format(size)
from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = 100 / max_priority / priority / 2
    return "font-size: {}em;".format(size)
Fix tag cloud weird size
Fix tag cloud weird size
Python
mit
daGrevis/daGrevis.lv,daGrevis/daGrevis.lv,daGrevis/daGrevis.lv
from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = (max_priority / 10.) * priority
    return "font-size: {}em;".format(size)
Fix tag cloud weird size

from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = 100 / max_priority / priority / 2
    return "font-size: {}em;".format(size)

<commit_before>from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = (max_priority / 10.) * priority
    return "font-size: {}em;".format(size)
<commit_msg>Fix tag cloud weird size<commit_after>

from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = 100 / max_priority / priority / 2
    return "font-size: {}em;".format(size)

from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = (max_priority / 10.) * priority
    return "font-size: {}em;".format(size)
Fix tag cloud weird size
from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = 100 / max_priority / priority / 2
    return "font-size: {}em;".format(size)

<commit_before>from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = (max_priority / 10.) * priority
    return "font-size: {}em;".format(size)
<commit_msg>Fix tag cloud weird size<commit_after>from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    size = 100 / max_priority / priority / 2
    return "font-size: {}em;".format(size)
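One caveat on this fix: if the code runs under Python 2, which the float literal in the old version hints at, then 100 / max_priority / priority / 2 is integer division and truncates, e.g. max_priority=7, priority=3 yields 100/7/3/2 == 2. A sketch that keeps the new formula but forces float arithmetic:

from django import template

register = template.Library()


@register.filter
def get_style(tags, priority):
    max_priority = max(tags, key=lambda tag: tag["priority"])["priority"]
    # 100.0 makes every subsequent division a float division on Python 2 as well
    size = 100.0 / max_priority / priority / 2
    return "font-size: {}em;".format(size)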