| column          | type   | stats             |
|-----------------|--------|-------------------|
| commit          | string | lengths 40–40     |
| old_file        | string | lengths 4–118     |
| new_file        | string | lengths 4–118     |
| old_contents    | string | lengths 0–2.94k   |
| new_contents    | string | lengths 1–4.43k   |
| subject         | string | lengths 15–444    |
| message         | string | lengths 16–3.45k  |
| lang            | string | 1 class           |
| license         | string | 13 classes        |
| repos           | string | lengths 5–43.2k   |
| prompt          | string | lengths 17–4.58k  |
| response        | string | lengths 1–4.43k   |
| prompt_tagged   | string | lengths 58–4.62k  |
| response_tagged | string | lengths 1–4.43k   |
| text            | string | lengths 132–7.29k |
| text_tagged     | string | lengths 173–7.33k |
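This looks like an export of a Hugging Face dataset of Python commit edits. A minimal sketch of loading and inspecting a dataset with this schema, assuming a `datasets` export; the repository id below is a placeholder, since the dump does not name it:

```python
# Minimal sketch, assuming a Hugging Face `datasets` export; the dataset id
# "user/python-commit-edits" is a placeholder, not the dump's real name.
from datasets import load_dataset

ds = load_dataset("user/python-commit-edits", split="train")

row = ds[0]  # one record, keyed by the column names in the table above
print(row["commit"], row["old_file"], row["lang"], row["license"])
```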
---

commit: 64c8fd3fa18dd6644a67cbd9e9aa5f20eb5e85a7
old_file: var/spack/packages/mrnet/package.py
new_file: var/spack/packages/mrnet/package.py
old_contents:

```python
from spack import *

class Mrnet(Package):
    """The MRNet Multi-Cast Reduction Network."""
    homepage = "http://paradyn.org/mrnet"
    url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"

    version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
    version('4.1.0', '5a248298b395b329e2371bf25366115c')
    parallel = False

    depends_on("boost")

    def install(self, spec, prefix):
        configure("--prefix=%s" %prefix, "--enable-shared")
        make()
        make("install")
```

new_contents:

```python
from spack import *

class Mrnet(Package):
    """The MRNet Multi-Cast Reduction Network."""
    homepage = "http://paradyn.org/mrnet"
    url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"

    version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
    version('4.1.0', '5a248298b395b329e2371bf25366115c')

    variant('krelloptions', default=False, description="Also build the MRNet LW threadsafe libraries")

    parallel = False

    depends_on("boost")

    def install(self, spec, prefix):
        # Build the MRNet LW thread safe libraries when the krelloptions variant is present
        if '+krelloptions' in spec:
            configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
        else:
            configure("--prefix=%s" %prefix, "--enable-shared")
        make()
        make("install")
```

subject: Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries.
message: Add krelloptions variant that is used to turn on a configuration option to build the thread safe lightweight libraries.
lang: Python
license: lgpl-2.1
repos: EmreAtes/spack,krafczyk/spack,matthiasdiener/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,lgarren/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,krafczyk/spack,matthiasdiener/spack,skosukhin/spack,skosukhin/spack,mfherbst/spack,EmreAtes/spack,TheTimmy/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,tmerrick1/spack,TheTimmy/spack,lgarren/spack,LLNL/spack,iulian787/spack,krafczyk/spack,skosukhin/spack,LLNL/spack,TheTimmy/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack,skosukhin/spack,iulian787/spack,TheTimmy/spack,mfherbst/spack,lgarren/spack,skosukhin/spack,tmerrick1/spack,iulian787/spack,iulian787/spack
---

commit: 42b6e51df4377e933e6ee24b12a5d83d42a655da
old_file: backend/setup.py
new_file: backend/setup.py
old_contents:

```python
from setuptools import setup, find_packages

setup(
    name="maguire",
    version="0.1",
    url='https://github.com/picsadotcom/maguire',
    license='BSD',
    author='Picsa',
    author_email='admin@picsa.com',
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        'Django',
        'djangorestframework==3.6.4',
        'django-rest-auth',
        'dj-database-url',
        'psycopg2',
        'raven',
        'gunicorn',
        'django-filter',
        'whitenoise',
        'celery',
        'redis',
        'pytz',
        'python-dateutil',
        'django-cors-middleware',
        'django-reversion',
        'graphene<2.0',
        'graphene-django<2.0.0',
        'graphql-core<2.0',
        'pendulum',
        'django-role-permissions==1.2.1',
        'django-celery-beat',
        'boto3',
        'django-storages',
        'opbeat',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
```

new_contents:

```python
from setuptools import setup, find_packages

setup(
    name="maguire",
    version="0.1",
    url='https://github.com/picsadotcom/maguire',
    license='BSD',
    author='Picsa',
    author_email='admin@picsa.com',
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        'Django',
        'djangorestframework==3.6.4',
        'django-rest-auth',
        'dj-database-url',
        'psycopg2',
        'raven',
        'gunicorn',
        'django-filter',
        'whitenoise',
        'celery',
        'redis',
        'pytz',
        'python-dateutil',
        'django-cors-middleware',
        'django-reversion',
        'graphene<2.0',
        'graphene-django<2.0.0',
        'graphql-core<2.0',
        'pendulum',
        'django-role-permissions==1.2.1',
        'django-celery-beat',
        'boto3',
        'django-storages',
        'opbeat',
        'postmarker',
        'django-extensions',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
```

subject: Add missing packages to backend
message: Add missing packages to backend
lang: Python
license: bsd-3-clause
repos: picsadotcom/maguire
---

commit: 51a505747d29198ea3df0a43c32b1018a40e6bc9
old_file: monopoly/Bank/g.py
new_file: monopoly/Bank/g.py
old_contents:

```python
import time
import json
import sqlite3
import socket
from collections import deque


class safesocket(socket.socket):
    def __init__(self, *args):
        self.floodQueue = deque([0] * 10, maxlen=10)
        super().__init__(*args)

    def send(self, message, *args):
        try:
            elapsed = int(time.time() * 1000) - self.floodQueue[0]
            if not isinstance(message, bytes):
                message = message.encode()
            if elapsed / 10 < 100:
                # Start rate limiting after 10 messages within 100ms
                # to avoid IRC kicking us for flooding
                time.sleep((100 - elapsed / 10) / 1000)
            super().send(message, *args)
            self.floodQueue.append(int(time.time() * 1000))
        except Exception as e:
            print('Could not write to socket: ', e)


starttime = int(time.time())
lastDisconnect = 0

with open('config/config.json') as config_file:
    config = json.load(config_file)

channels = config['irc']['channels']
silent_channels = config['irc']['silent_channels']

db = sqlite3.connect(config['db']['location'])
cursor = db.cursor()
ircsock = None
```

new_contents:

```python
import time
import json
import sqlite3
import socket
from collections import deque


class ratelimit:
    def __init__(self, max, duration):
        self.max = max
        self.duration = duration  # in milliseconds
        self.rateQueue = deque([0] * max, maxlen=max)

    def queue(self, job):
        elapsed = int(time.time() * 1000) - self.rateQueue[0]
        if elapsed / self.max < self.duration:
            time.sleep((self.duration - elapsed / self.max) / 1000)
        self.rateQueue.append(int(time.time() * 1000))


# TODO need to adapt this for the event loop so we can yield on delays
class safesocket(socket.socket):
    def __init__(self, *args):
        self.floodQueue = ratelimit(10, 100)
        super().__init__(*args)

    def send(self, message, *args):
        try:
            if not isinstance(message, bytes):
                message = message.encode()
            # Start rate limiting after 10 messages within 100ms
            # to avoid IRC kicking us for flooding
            self.floodQueue.queue()
            super().send(message, *args)
        except Exception as e:
            print('Could not write to socket: ', e)


starttime = int(time.time())
lastDisconnect = 0

with open('config/config.json') as config_file:
    config = json.load(config_file)

channels = config['irc']['channels']
silent_channels = config['irc']['silent_channels']

db = sqlite3.connect(config['db']['location'])
cursor = db.cursor()
ircsock = None
```

subject: Move ratelimit functionality into its own class
message: Move ratelimit functionality into its own class
lang: Python
license: mit
repos: laneshetron/monopoly
---

commit: f52287bfc6a38b35daf9d880886cc159550a157c
old_file: mutant/__init__.py
new_file: mutant/__init__.py
old_contents:

```python
import logging

__version__ = VERSION = (0, 0, 1)

logger = logging.getLogger('mutant')

import hacks
```

new_contents:

```python
import logging

__version__ = VERSION = (0, 0, 1)

logger = logging.getLogger('mutant')
```

subject: Make sure to avoid loading hacks on mutant loading
message: Make sure to avoid loading hacks on mutant loading
lang: Python
license: mit
repos: charettes/django-mutant
---

commit: 4585ab22a4185122162b987cf8cc845a63ed5a05
old_file: pyheufybot/modules/say.py
new_file: pyheufybot/modules/say.py
old_contents:

```python
from module_interface import Module, ModuleType

class Say(Module):
    def __init__(self):
        self.trigger = "say"
        self.moduleType = ModuleType.ACTIVE
        self.messagesTypes = ["PRIVMSG"]
        self.helpText = "Usage: say <message> | Makes the bot say the given line"

    def execute(self, message, serverInfo):
        pass
```

new_contents:

```python
from module_interface import Module, ModuleType
from message import IRCResponse, ResponseType

class Say(Module):
    def __init__(self):
        self.trigger = "say"
        self.moduleType = ModuleType.ACTIVE
        self.messagesTypes = ["PRIVMSG"]
        self.helpText = "Usage: say <message> | Makes the bot say the given line"

    def execute(self, message, serverInfo):
        return [ IRCResponse(message.replyTo, ResponseType.MESSAGE, message.messageText) ]
```

subject: Make it possible for modules to send a response
message: Make it possible for modules to send a response
lang: Python
license: mit
repos: Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
---

commit: 0f4fd0d49ba06963b0b97e032fd2e6eedf8e597a
old_file: cloacina/extract_from_b64.py
new_file: cloacina/extract_from_b64.py
old_contents:

```python
from bs4 import BeautifulSoup
import json
import re

def extract_from_b64(encoded_doc):
    #doc = base64.urlsafe_b64decode(encoded_doc)
    doc = encoded_doc.decode("base64")
    doc = re.sub("</p><p>", " ", doc)
    soup = BeautifulSoup(doc)
    news_source = soup.find("meta", {"name":"sourceName"})['content']
    article_title = soup.find("title").text.strip()
    try:
        publication_date = soup.find("div", {"class":"PUB-DATE"}).text.strip()
    except AttributeError:
        publication_date = soup.find("div", {"class":"DATE"}).text.strip()
    article_body = soup.find("div", {"class":"BODY"}).text.strip()
    doc_id = soup.find("meta", {"name":"documentToken"})['content']
    data = {"news_source" : news_source,
            "publication_date_raw" : publication_date,
            "article_title" : article_title,
            "article_body" : article_body,
            "doc_id" : doc_id}
    return data
```

new_contents:

```python
from bs4 import BeautifulSoup
import json
import re

def extract_from_b64(encoded_doc):
    #doc = base64.urlsafe_b64decode(encoded_doc)
    doc = encoded_doc.decode("base64")
    doc = re.sub("</p><p>", " ", doc)
    doc = re.sub('<div class="BODY-2">', " ", doc)
    soup = BeautifulSoup(doc)
    news_source = soup.find("meta", {"name":"sourceName"})['content']
    article_title = soup.find("title").text.strip()
    try:
        publication_date = soup.find("div", {"class":"PUB-DATE"}).text.strip()
    except AttributeError:
        publication_date = soup.find("div", {"class":"DATE"}).text.strip()
    article_body = soup.find("div", {"class":"BODY"}).text.strip()
    doc_id = soup.find("meta", {"name":"documentToken"})['content']
    data = {"news_source" : news_source,
            "publication_date_raw" : publication_date,
            "article_title" : article_title,
            "article_body" : article_body,
            "doc_id" : doc_id}
    return data
```

subject: Fix missing space between lede and rest of article
message: Fix missing space between lede and rest of article
lang: Python
license: mit
repos: ahalterman/cloacina
---

commit: 51d37f7ac6bebf9b1d4c6efd16a968b7410a7791
old_file: rcnmf/tests/test_tsqr2.py
new_file: rcnmf/tests/test_tsqr2.py
old_contents:

```python
import dask.array as da
from into import into
from dask.array.into import discover
from dask.dot import dot_graph
import tempfile
import rcnmf.tsqr

x = da.random.standard_normal(size=(100, 100), blockshape=(100, 50))

temp_file = tempfile.NamedTemporaryFile(suffix='.hdf5')
uri = temp_file.name + '::/X'
into(uri, x)

data = into(da.Array, uri, blockshape=(100, 100))
omega = da.random.standard_normal(size=(100, 20), blockshape=(100, 20))

mat_h = data.dot(omega)
q, r = rcnmf.tsqr.tsqr(mat_h, blockshape=(100, 20))

print data.shape
print q.shape

mul = data.dot(q)

dot_graph(data.dask, filename='data')
dot_graph(omega.dask, filename='omega')
dot_graph(q.dask, filename='q')
dot_graph(mul.dask, filename='mul')

temp_file.close()
```

new_contents:

```python
import dask.array as da
from into import into
from dask.array.into import discover
from dask.dot import dot_graph
import tempfile
import rcnmf.tsqr

x = da.random.standard_normal(size=(100, 100), blockshape=(100, 50))

temp_file = tempfile.NamedTemporaryFile(suffix='.hdf5')
uri = temp_file.name + '::/X'
into(uri, x)

data = into(da.Array, uri, blockshape=(100, 100))
omega = da.random.standard_normal(size=(100, 20), blockshape=(100, 20))

mat_h = data.dot(omega)
q, r = rcnmf.tsqr.tsqr(mat_h, blockshape=(100, 20))

print data.shape
print q.shape

mul = data.dot(q)

dot_graph(data.dask, filename='data')
dot_graph(omega.dask, filename='omega')
dot_graph(q.dask, filename='q')
dot_graph(mul.dask, filename='mul')

uri = temp_file.name + '::/mul'
into(uri, mul)

temp_file.close()
```

subject: Save mul at the end to force execution.
message: Save mul at the end to force execution.
lang: Python
license: bsd-2-clause
repos: marianotepper/csnmf
---

commit: 359040acc4b8c54db84e154b15cabfb23b4e18a6
old_file: src/aiy/vision/models/utils.py
new_file: src/aiy/vision/models/utils.py
old_contents:

```python
"""Utility to load compute graphs from diffrent sources."""

import os

def load_compute_graph(name):
    path = os.path.join('/opt/aiy/models', name)
    with open(path, 'rb') as f:
        return f.read()
```

new_contents:

```python
"""Utility to load compute graphs from diffrent sources."""

import os

def load_compute_graph(name):
    path = os.environ.get('VISION_BONNET_MODELS_PATH', '/opt/aiy/models')
    with open(os.path.join(path, name), 'rb') as f:
        return f.read()
```

subject: Use VISION_BONNET_MODELS_PATH env var for custom models path.
message: Use VISION_BONNET_MODELS_PATH env var for custom models path. Change-Id: I687ca96e4cf768617fa45d50d68dadffde750b87
lang: Python
license: apache-2.0
repos: google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian
"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.path.join('/opt/aiy/models', name) with open(path, 'rb') as f: return f.read() Use VISION_BONNET_MODELS_PATH env var for custom models path. Change-Id: I687ca96e4cf768617fa45d50d68dadffde750b87
"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.environ.get('VISION_BONNET_MODELS_PATH', '/opt/aiy/models') with open(os.path.join(path, name), 'rb') as f: return f.read()
<commit_before>"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.path.join('/opt/aiy/models', name) with open(path, 'rb') as f: return f.read() <commit_msg>Use VISION_BONNET_MODELS_PATH env var for custom models path. Change-Id: I687ca96e4cf768617fa45d50d68dadffde750b87<commit_after>
"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.environ.get('VISION_BONNET_MODELS_PATH', '/opt/aiy/models') with open(os.path.join(path, name), 'rb') as f: return f.read()
"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.path.join('/opt/aiy/models', name) with open(path, 'rb') as f: return f.read() Use VISION_BONNET_MODELS_PATH env var for custom models path. Change-Id: I687ca96e4cf768617fa45d50d68dadffde750b87"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.environ.get('VISION_BONNET_MODELS_PATH', '/opt/aiy/models') with open(os.path.join(path, name), 'rb') as f: return f.read()
<commit_before>"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.path.join('/opt/aiy/models', name) with open(path, 'rb') as f: return f.read() <commit_msg>Use VISION_BONNET_MODELS_PATH env var for custom models path. Change-Id: I687ca96e4cf768617fa45d50d68dadffde750b87<commit_after>"""Utility to load compute graphs from diffrent sources.""" import os def load_compute_graph(name): path = os.environ.get('VISION_BONNET_MODELS_PATH', '/opt/aiy/models') with open(os.path.join(path, name), 'rb') as f: return f.read()
---

commit: 209c0d0201b76a0f2db7d8b507b2eaa2df03fcae
old_file: lib/stats.py
new_file: lib/stats.py
old_contents:

```python
"""
Statistics.
"""

from numpy import exp
from scipy.stats import rv_continuous
from scipy.special import gamma


class grw_gen(rv_continuous):
    """
    Generalized Reverse Weibull distribution.

    PDF: a/gamma(g) * x^(a*g-1) * exp(-x^a)
    for x,a,g >= 0
    """

    def _pdf(self,x,a,g):
        return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a))

    def _fitstart(self,data):
        return (2.0,1.0,0.0,0.02)

grw = grw_gen(a=0.0, name='grw', shapes='a,g')
```

new_contents:

```python
"""
Statistics.
"""

import numpy as np
from scipy.stats import gengamma, norm

"""
Set default starting parameters for fitting a generalized gamma distribution.
These parameters are sensible for ATLAS v_n distributions.

Order: (a, c, loc, scale) where a,c are shape params.
"""
gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1)


def fit_file(fname, dist='gengamma', **kwargs):
    """
    Fit a distribution to each column of a data file.

    Arguments
    ---------
    fname -- file name or object containing data columns to fit
    dist -- distribution to fit, either 'gengamma' (default) or 'norm'
    kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored

    Returns
    -------
    iterable of MLE parameters:
    params_0, ... , params_N
    for each column, where params are tuples of the form
    (*shapes, loc, scale)
    as produced by scipy.stats.rv_continuous.fit
    """

    # remove 'unpack' and 'ndmin' kwargs if set
    for key in ['unpack', 'ndmin']:
        try:
            del kwargs[key]
        except KeyError:
            pass

    # read file
    cols = np.loadtxt(fname, unpack=True, ndmin=2, **kwargs)

    # set fitting distribution
    try:
        dist = eval(dist)
    except NameError:
        raise ValueError('invalid distribution: ' + dist)

    return (dist.fit(c) for c in cols)
```

subject: Replace custom GRW dist with scipy gengamma. Implement file fitting function.
message: Replace custom GRW dist with scipy gengamma. Implement file fitting function.
lang: Python
license: mit
repos: jbernhard/ebe-analysis
""" Statistics. """ from numpy import exp from scipy.stats import rv_continuous from scipy.special import gamma class grw_gen(rv_continuous): """ Generalized Reverse Weibull distribution. PDF: a/gamma(g) * x^(a*g-1) * exp(-x^a) for x,a,g >= 0 """ def _pdf(self,x,a,g): return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a)) def _fitstart(self,data): return (2.0,1.0,0.0,0.02) grw = grw_gen(a=0.0, name='grw', shapes='a,g') Replace custom GRW dist with scipy gengamma. Implement file fitting function.
""" Statistics. """ import numpy as np from scipy.stats import gengamma, norm """ Set default starting parameters for fitting a generalized gamma distribution. These parameters are sensible for ATLAS v_n distributions. Order: (a, c, loc, scale) where a,c are shape params. """ gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1) def fit_file(fname,dist='gengamma',**kwargs): """ Fit a distribution to each column of a data file. Arguments --------- fname -- file name or object containing data columns to fit dist -- distribution to fit, either 'gengamma' (default) or 'norm' kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored Returns ------- iterable of MLE parameters: params_0, ... , params_N for each column, where params are tuples of the form (*shapes, loc, scale) as produced by scipy.stats.rv_continuous.fit """ # remove 'unpack' and 'ndmin' kwargs if set for key in ['unpack','ndmin']: try: del kwargs[key] except KeyError: pass # read file cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs) # set fitting distribution try: dist = eval(dist) except NameError: raise ValueError('invalid distribution: ' + dist) return (dist.fit(c) for c in cols)
<commit_before>""" Statistics. """ from numpy import exp from scipy.stats import rv_continuous from scipy.special import gamma class grw_gen(rv_continuous): """ Generalized Reverse Weibull distribution. PDF: a/gamma(g) * x^(a*g-1) * exp(-x^a) for x,a,g >= 0 """ def _pdf(self,x,a,g): return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a)) def _fitstart(self,data): return (2.0,1.0,0.0,0.02) grw = grw_gen(a=0.0, name='grw', shapes='a,g') <commit_msg>Replace custom GRW dist with scipy gengamma. Implement file fitting function.<commit_after>
""" Statistics. """ import numpy as np from scipy.stats import gengamma, norm """ Set default starting parameters for fitting a generalized gamma distribution. These parameters are sensible for ATLAS v_n distributions. Order: (a, c, loc, scale) where a,c are shape params. """ gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1) def fit_file(fname,dist='gengamma',**kwargs): """ Fit a distribution to each column of a data file. Arguments --------- fname -- file name or object containing data columns to fit dist -- distribution to fit, either 'gengamma' (default) or 'norm' kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored Returns ------- iterable of MLE parameters: params_0, ... , params_N for each column, where params are tuples of the form (*shapes, loc, scale) as produced by scipy.stats.rv_continuous.fit """ # remove 'unpack' and 'ndmin' kwargs if set for key in ['unpack','ndmin']: try: del kwargs[key] except KeyError: pass # read file cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs) # set fitting distribution try: dist = eval(dist) except NameError: raise ValueError('invalid distribution: ' + dist) return (dist.fit(c) for c in cols)
""" Statistics. """ from numpy import exp from scipy.stats import rv_continuous from scipy.special import gamma class grw_gen(rv_continuous): """ Generalized Reverse Weibull distribution. PDF: a/gamma(g) * x^(a*g-1) * exp(-x^a) for x,a,g >= 0 """ def _pdf(self,x,a,g): return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a)) def _fitstart(self,data): return (2.0,1.0,0.0,0.02) grw = grw_gen(a=0.0, name='grw', shapes='a,g') Replace custom GRW dist with scipy gengamma. Implement file fitting function.""" Statistics. """ import numpy as np from scipy.stats import gengamma, norm """ Set default starting parameters for fitting a generalized gamma distribution. These parameters are sensible for ATLAS v_n distributions. Order: (a, c, loc, scale) where a,c are shape params. """ gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1) def fit_file(fname,dist='gengamma',**kwargs): """ Fit a distribution to each column of a data file. Arguments --------- fname -- file name or object containing data columns to fit dist -- distribution to fit, either 'gengamma' (default) or 'norm' kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored Returns ------- iterable of MLE parameters: params_0, ... , params_N for each column, where params are tuples of the form (*shapes, loc, scale) as produced by scipy.stats.rv_continuous.fit """ # remove 'unpack' and 'ndmin' kwargs if set for key in ['unpack','ndmin']: try: del kwargs[key] except KeyError: pass # read file cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs) # set fitting distribution try: dist = eval(dist) except NameError: raise ValueError('invalid distribution: ' + dist) return (dist.fit(c) for c in cols)
<commit_before>""" Statistics. """ from numpy import exp from scipy.stats import rv_continuous from scipy.special import gamma class grw_gen(rv_continuous): """ Generalized Reverse Weibull distribution. PDF: a/gamma(g) * x^(a*g-1) * exp(-x^a) for x,a,g >= 0 """ def _pdf(self,x,a,g): return a/gamma(g) * pow(x,a*g-1) * exp(-pow(x,a)) def _fitstart(self,data): return (2.0,1.0,0.0,0.02) grw = grw_gen(a=0.0, name='grw', shapes='a,g') <commit_msg>Replace custom GRW dist with scipy gengamma. Implement file fitting function.<commit_after>""" Statistics. """ import numpy as np from scipy.stats import gengamma, norm """ Set default starting parameters for fitting a generalized gamma distribution. These parameters are sensible for ATLAS v_n distributions. Order: (a, c, loc, scale) where a,c are shape params. """ gengamma._fitstart = lambda data: (1.0, 2.0, 0.0, 0.1) def fit_file(fname,dist='gengamma',**kwargs): """ Fit a distribution to each column of a data file. Arguments --------- fname -- file name or object containing data columns to fit dist -- distribution to fit, either 'gengamma' (default) or 'norm' kwargs -- for np.loadtxt, except 'unpack' or 'ndmin' are ignored Returns ------- iterable of MLE parameters: params_0, ... , params_N for each column, where params are tuples of the form (*shapes, loc, scale) as produced by scipy.stats.rv_continuous.fit """ # remove 'unpack' and 'ndmin' kwargs if set for key in ['unpack','ndmin']: try: del kwargs[key] except KeyError: pass # read file cols = np.loadtxt(fname,unpack=True,ndmin=2,**kwargs) # set fitting distribution try: dist = eval(dist) except NameError: raise ValueError('invalid distribution: ' + dist) return (dist.fit(c) for c in cols)
313aafc11f76888614e2a0523e9e858e71765eaa
tests/test_wc.py
tests/test_wc.py
# Copyright (C) 2005-2007 Jelmer Vernooij <jelmer@samba.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Subversion ra library tests."""

from bzrlib.tests import TestCase

import ra


class VersionTest(TestCase):
    def test_version_length(self):
        self.assertEquals(4, len(ra.version()))
# Copyright (C) 2005-2007 Jelmer Vernooij <jelmer@samba.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Subversion ra library tests."""

from bzrlib.tests import TestCase

import wc


class VersionTest(TestCase):
    def test_version_length(self):
        self.assertEquals(4, len(wc.version()))


class WorkingCopyTests(TestCase):
    def test_get_adm_dir(self):
        self.assertEquals(".svn", wc.get_adm_dir())

    def test_is_normal_prop(self):
        self.assertTrue(wc.is_normal_prop("svn:ignore"))

    def test_is_entry_prop(self):
        self.assertTrue(wc.is_entry_prop("svn:entry:foo"))

    def test_is_wc_prop(self):
        self.assertTrue(wc.is_wc_prop("svn:wc:foo"))

    def test_get_default_ignores(self):
        self.assertIsInstance(wc.get_default_ignores({}), list)
Add some more tests for wc module.
Add some more tests for wc module.
Python
lgpl-2.1
jelmer/subvertpy,jelmer/subvertpy
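The new tests double as a tour of the wc module's flat API. The same calls outside a test harness, assuming subvertpy's wc extension is built and importable; expected values are taken directly from the assertions above.

import wc

print(wc.version())                       # a 4-tuple, per the version test
print(wc.get_adm_dir())                   # ".svn"
print(wc.is_normal_prop("svn:ignore"))    # True
print(wc.is_entry_prop("svn:entry:foo"))  # True
print(wc.is_wc_prop("svn:wc:foo"))        # True
print(wc.get_default_ignores({}))         # a list of ignore patterns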
e5f662d9cebe4133705eca74a300c325d432ad04
anvil/components/cinder_client.py
anvil/components/cinder_client.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from anvil import components as comp


class CinderClientUninstaller(comp.PythonUninstallComponent):
    def __init__(self, *args, **kargs):
        comp.PythonUninstallComponent.__init__(self, *args, **kargs)


class CinderClientInstaller(comp.PythonInstallComponent):
    def __init__(self, *args, **kargs):
        comp.PythonInstallComponent.__init__(self, *args, **kargs)

    def _filter_pip_requires_line(self, line):
        if line.lower().find('keystoneclient') != -1:
            return None
        if line.lower().find('novaclient') != -1:
            return None
        if line.lower().find('glanceclient') != -1:
            return None
        return line


class CinderClientRuntime(comp.EmptyRuntime):
    def __init__(self, *args, **kargs):
        comp.EmptyRuntime.__init__(self, *args, **kargs)
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from anvil import components as comp


class CinderClientUninstaller(comp.PythonUninstallComponent):
    def __init__(self, *args, **kargs):
        comp.PythonUninstallComponent.__init__(self, *args, **kargs)


class CinderClientInstaller(comp.PythonInstallComponent):
    def __init__(self, *args, **kargs):
        comp.PythonInstallComponent.__init__(self, *args, **kargs)


class CinderClientRuntime(comp.EmptyRuntime):
    def __init__(self, *args, **kargs):
        comp.EmptyRuntime.__init__(self, *args, **kargs)
Remove destruction of pips/test requires entries that don't exist.
Remove destruction of pips/test requires entries that don't exist.
Python
apache-2.0
stackforge/anvil,stackforge/anvil,mc2014/anvil,mc2014/anvil
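The deleted hook's logic, dropping requirement lines that name sibling client libraries, is easy to read as a standalone function. This sketch mirrors it outside anvil's class machinery; the function name is mine for illustration, not anvil's API.

def filter_pip_requires(lines):
    # Mirrors the removed hook: any line mentioning one of the
    # sibling client libraries is dropped from pip-requires.
    banned = ('keystoneclient', 'novaclient', 'glanceclient')
    return [line for line in lines
            if not any(name in line.lower() for name in banned)]


print(filter_pip_requires(['python-novaclient>=2.6', 'pbr>=0.5.21']))
# -> ['pbr>=0.5.21']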
2a2c9dc43a7d096dd5601f51c0407c36433e73e1
astropy/coordinates/__init__.py
astropy/coordinates/__init__.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for celestial coordinates
of astronomical objects. It also contains a framework for conversions
between coordinate systems.
"""

from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from .errors import *
from .angles import *
from .coordsystems import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_systems import *
from .name_resolve import *
from .matching import *
from .old_builtin_systems_names import *  # TODO: remove this in next version, along with module file
from .representation import *

__doc__ += builtin_systems._transform_graph_docs
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for celestial coordinates
of astronomical objects. It also contains a framework for conversions
between coordinate systems.
"""

from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from .errors import *
from .angles import *
from .baseframe import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .old_builtin_systems_names import *  # TODO: remove this in next version, along with module file
from .representation import *

__doc__ += builtin_systems._transform_graph_docs
Make coordinate 'frames' be imported
Make coordinate 'frames' be imported
Python
bsd-3-clause
kelle/astropy,lpsinger/astropy,dhomeier/astropy,StuartLittlefair/astropy,DougBurke/astropy,astropy/astropy,stargaser/astropy,AustereCuriosity/astropy,lpsinger/astropy,stargaser/astropy,pllim/astropy,astropy/astropy,funbaker/astropy,joergdietrich/astropy,joergdietrich/astropy,kelle/astropy,aleksandr-bakanov/astropy,kelle/astropy,funbaker/astropy,dhomeier/astropy,mhvk/astropy,bsipocz/astropy,bsipocz/astropy,joergdietrich/astropy,MSeifert04/astropy,AustereCuriosity/astropy,mhvk/astropy,dhomeier/astropy,bsipocz/astropy,tbabej/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,MSeifert04/astropy,StuartLittlefair/astropy,kelle/astropy,astropy/astropy,tbabej/astropy,funbaker/astropy,saimn/astropy,joergdietrich/astropy,pllim/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,astropy/astropy,mhvk/astropy,dhomeier/astropy,StuartLittlefair/astropy,DougBurke/astropy,pllim/astropy,DougBurke/astropy,stargaser/astropy,tbabej/astropy,larrybradley/astropy,lpsinger/astropy,saimn/astropy,mhvk/astropy,saimn/astropy,MSeifert04/astropy,StuartLittlefair/astropy,dhomeier/astropy,kelle/astropy,tbabej/astropy,bsipocz/astropy,funbaker/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,pllim/astropy,saimn/astropy,lpsinger/astropy,DougBurke/astropy,saimn/astropy,astropy/astropy,joergdietrich/astropy,stargaser/astropy,pllim/astropy,larrybradley/astropy,AustereCuriosity/astropy,tbabej/astropy,larrybradley/astropy,lpsinger/astropy,MSeifert04/astropy,mhvk/astropy
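Only the star-import targets change in this diff. As a reminder of the mechanics those lines rely on, here is a toy, runnable model of what a star import copies from a submodule when it defines __all__; the module and class names are stand-ins, not astropy's real contents, and this covers only the __all__ case.

import types

# A throwaway stand-in submodule with a curated __all__.
baseframe = types.ModuleType('baseframe')
exec("__all__ = ['BaseCoordinateFrame']\n"
     "class BaseCoordinateFrame: pass\n"
     "class _Hidden: pass\n", baseframe.__dict__)

# What a star import pulls into the package namespace:
namespace = {name: getattr(baseframe, name) for name in baseframe.__all__}
print(sorted(namespace))   # ['BaseCoordinateFrame'], _Hidden stays private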
ab1028ccd73ea39d14f1eee57b91b303b31e2d63
briefcase/__init__.py
briefcase/__init__.py
from __future__ import print_function, unicode_literals, absolute_import, division

__all__ = [
    '__version__',
]

# Examples of valid version strings
# __version__ = '1.2.3.dev1'    # Development release 1
# __version__ = '1.2.3a1'       # Alpha Release 1
# __version__ = '1.2.3b1'       # Beta Release 1
# __version__ = '1.2.3rc1'      # RC Release 1
# __version__ = '1.2.3'         # Final Release
# __version__ = '1.2.3.post1'   # Post Release 1

__version__ = '0.1.9'
from __future__ import print_function, unicode_literals, absolute_import, division

__all__ = [
    '__version__',
]

# Examples of valid version strings
# __version__ = '1.2.3.dev1'    # Development release 1
# __version__ = '1.2.3a1'       # Alpha Release 1
# __version__ = '1.2.3b1'       # Beta Release 1
# __version__ = '1.2.3rc1'      # RC Release 1
# __version__ = '1.2.3'         # Final Release
# __version__ = '1.2.3.post1'   # Post Release 1

__version__ = '0.2.0'
Bump version for v0.2.0 release.
Bump version for v0.2.0 release.
Python
bsd-3-clause
pybee/briefcase
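The comment block in this file enumerates the PEP 440 release forms. The packaging library (an assumed extra dependency here, not something the record itself pulls in) orders them exactly as the comments imply:

from packaging.version import Version

forms = ['1.2.3.dev1', '1.2.3a1', '1.2.3b1',
         '1.2.3rc1', '1.2.3', '1.2.3.post1']
print(sorted(forms, key=Version))
# dev1 < a1 < b1 < rc1 < final < post1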
87d1b24d8ee806c5aa6cf73d83472b129b0f87fe
mitty/simulation/genome/sampledgenome.py
mitty/simulation/genome/sampledgenome.py
import pysam
from numpy.random import choice
import math


def assign_random_gt(input_vcf, outname, sample_name="HG", default_af=0.01):
    vcf_pointer = pysam.VariantFile(filename=input_vcf)
    new_header = vcf_pointer.header.copy()
    if "GT" not in new_header.formats:
        new_header.formats.add("GT", "1", "String", "Consensus Genotype across all datasets with called genotype")
    new_header.samples.add(sample_name)

    default_probs = [1 - default_af - math.pow(default_af, 2), default_af, math.pow(default_af, 2)]

    with open(outname, 'w') as out_vcf:
        out_vcf.write(str(new_header))
        for rec in vcf_pointer.fetch():
            rec_copy = rec.copy()
            if "GT" not in rec_copy.format.keys():
                if "AF" not in rec_copy.info.keys():
                    gt_probs = default_probs
                else:
                    af = rec_copy.info["AF"]
                    gt_probs = [1 - af - math.pow(af, 2), af, math.pow(af, 2)]
                c = choice(["0/0", "0/1", "1/1"], p=gt_probs)
                out_vcf.write("\t".join([str(rec_copy)[:-1], "GT", c]) + "\n")
    vcf_pointer.close()
import pysam
from numpy.random import choice


def assign_random_gt(input_vcf, outname, sample_name="HG", default_af=0.01):
    vcf_pointer = pysam.VariantFile(filename=input_vcf)
    new_header = vcf_pointer.header.copy()
    if "GT" not in new_header.formats:
        new_header.formats.add("GT", "1", "String", "Consensus Genotype across all datasets with called genotype")
    new_header.samples.add(sample_name)

    default_probs = [1 - default_af * (1 + default_af), default_af, default_af * default_af]

    with open(outname, 'w') as out_vcf:
        out_vcf.write(str(new_header))
        for rec in vcf_pointer.fetch():
            rec_copy = rec.copy()
            if "GT" not in rec_copy.format.keys():
                if "AF" not in rec_copy.info.keys():
                    gt_probs = default_probs
                else:
                    af = rec_copy.info["AF"]
                    gt_probs = [1 - af * (1 + af), af, af * af]
                c = choice(["0/0", "0/1", "1/1"], p=gt_probs)
                out_vcf.write("\t".join([str(rec_copy)[:-1], "GT", c]) + "\n")
    vcf_pointer.close()
Add random GT to a given vcf
Add random GT to a given vcf
Python
apache-2.0
sbg/Mitty,sbg/Mitty
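numpy's choice requires the three genotype probabilities to sum to one, which is exactly what the rewritten arithmetic guarantees. A quick check of that formula plus a sampling run; the allele frequency here is an arbitrary demo value.

from numpy.random import choice

af = 0.01
gt_probs = [1 - af * (1 + af), af, af * af]   # same formula as assign_random_gt
assert abs(sum(gt_probs) - 1.0) < 1e-12

draws = choice(["0/0", "0/1", "1/1"], size=100000, p=gt_probs)
print((draws == "0/1").mean())   # should hover near af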
1f8b54d22cee5653254514bf07c1b4cb1eb147cb
_grabconfig.py
_grabconfig.py
#!/usr/bin/env python2
import os, shutil

files = ["/etc/crontab", "/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
         "/home/dagon/.bashrc", "/home/dagon/.i3status.conf",
         "/home/dagon/.profile", "/home/dagon/.vimrc",
         "/home/dagon/.i3/config", "/home/dagon/.vim",
         "/home/dagon/.config/bless", "/home/dagon/.config/terminator",
         "/bla.txt"]

for item in files:
    dest = os.getcwd() + item
    if os.path.isdir(item):
        try:
            shutil.rmtree(dest)
        except:
            pass
        shutil.copytree(item, dest)
    else:
        try:
            os.remove(dest)
        except:
            pass
        shutil.copyfile(item, dest)
#!/usr/bin/env python2
import os, shutil

files = ["/etc/crontab", "/usr/local/bin/ssu", "/usr/local/bin/xyzzy",
         "/home/dagon/.bashrc", "/home/dagon/.i3status.conf",
         "/home/dagon/.profile", "/home/dagon/.vimrc",
         "/home/dagon/.i3/config", "/home/dagon/.vim",
         "/home/dagon/.config/bless", "/home/dagon/.config/terminator"]

for item in files:
    dest = os.getcwd() + item
    if os.path.isdir(item):
        try:
            shutil.rmtree(dest)
        except:
            pass
        shutil.copytree(item, dest)
    else:
        try:
            os.remove(dest)
        except:
            pass
        shutil.copyfile(item, dest)
Remove test item from config grabbing script
Remove test item from config grabbing script
Python
unlicense
weloxux/dotfiles,weloxux/dotfiles
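The script above targets Python 2, so it needs the remove-then-copy dance. On Python 3.8+ the same mirroring collapses to dirs_exist_ok; a hedged sketch of that variant as a function (the commented invocation path is illustrative only):

import os
import shutil


def mirror(item, dest_root):
    # Same effect as the script's loop body, without the try/except
    # cleanup: dirs_exist_ok (Python 3.8+) overwrites in place.
    dest = dest_root + item
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    if os.path.isdir(item):
        shutil.copytree(item, dest, dirs_exist_ok=True)
    else:
        shutil.copyfile(item, dest)


# e.g. mirror('/etc/crontab', os.getcwd())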
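A quick look at the path arithmetic in the script above (illustrative only; the working directory shown is an assumption, not taken from the record):

import os

# Mirrors `dest = os.getcwd() + item` from the script above: absolute source
# paths are replayed underneath whatever directory the script is run from.
item = "/etc/crontab"
dest = os.getcwd() + item
print(dest)  # e.g. /home/dagon/dotfiles/etc/crontab when run from the repo root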
dd1bcf71c6548f99e6bc133bf890c87440e13535
taskflow/states.py
taskflow/states.py
# -*- coding: utf-8 -*-

# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Job states.
CLAIMED = 'CLAIMED'
FAILURE = 'FAILURE'
PENDING = 'PENDING'
REVERTING = 'REVERTING'
SUCCESS = 'SUCCESS'
UNCLAIMED = 'UNCLAIMED'
RESUMING = 'RESUMING'

# Flow states.
FAILURE = FAILURE
PENDING = PENDING
REVERTING = REVERTING
STARTED = 'STARTED'
SUCCESS = SUCCESS
RESUMING = RESUMING

# Task states.
FAILURE = FAILURE
STARTED = STARTED
SUCCESS = SUCCESS
# -*- coding: utf-8 -*-

# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Job states.
CLAIMED = 'CLAIMED'
FAILURE = 'FAILURE'
PENDING = 'PENDING'
REVERTING = 'REVERTING'
SUCCESS = 'SUCCESS'
UNCLAIMED = 'UNCLAIMED'
RESUMING = 'RESUMING'

# Flow states.
FAILURE = FAILURE
PENDING = PENDING
REVERTING = REVERTING
STARTED = 'STARTED'
SUCCESS = SUCCESS
RESUMING = RESUMING
RUNNING = 'RUNNING'

# Task states.
FAILURE = FAILURE
STARTED = STARTED
SUCCESS = SUCCESS
Add a running state which can be used to know when a workflow is running.
Add a running state which can be used to know when a workflow is running.
Python
apache-2.0
openstack/taskflow,jessicalucci/TaskManagement,jimbobhickville/taskflow,openstack/taskflow,citrix-openstack-build/taskflow,jimbobhickville/taskflow,junneyang/taskflow,varunarya10/taskflow,pombredanne/taskflow-1,jessicalucci/TaskManagement,citrix-openstack-build/taskflow,junneyang/taskflow,pombredanne/taskflow-1,varunarya10/taskflow
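A minimal sketch of what the added RUNNING constant enables. The constants are copied from the module above; the notion of an "active" set and the helper are assumptions, not part of the commit:

# Constants mirroring taskflow/states.py above.
STARTED = 'STARTED'
RUNNING = 'RUNNING'
RESUMING = 'RESUMING'

# Hypothetical grouping: states in which a workflow has begun but not finished.
ACTIVE_STATES = frozenset([STARTED, RUNNING, RESUMING])

def is_active(state):
    # True while the workflow is still in flight.
    return state in ACTIVE_STATES

print(is_active(RUNNING))    # True
print(is_active('SUCCESS'))  # False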
cfeca089dd10a6853d2b969d2d248a0f7d506d1a
emission/net/usercache/formatters/android/motion_activity.py
emission/net/usercache/formatters/android/motion_activity.py
import logging

import emission.core.wrapper.motionactivity as ecwa
import emission.net.usercache.formatters.common as fc

import attrdict as ad

def format(entry):
    formatted_entry = ad.AttrDict()
    formatted_entry["_id"] = entry["_id"]
    formatted_entry.user_id = entry.user_id

    metadata = entry.metadata
    if "time_zone" not in metadata:
        metadata.time_zone = "America/Los_Angeles"
    fc.expand_metadata_times(metadata)
    formatted_entry.metadata = metadata

    data = ad.AttrDict()
    data.type = ecwa.MotionTypes(entry.data.agb).value
    data.confidence = entry.data.agc
    data.ts = formatted_entry.metadata.write_ts
    data.local_dt = formatted_entry.metadata.write_local_dt
    data.fmt_time = formatted_entry.metadata.write_fmt_time
    formatted_entry.data = data

    return formatted_entry
import logging

import emission.core.wrapper.motionactivity as ecwa
import emission.net.usercache.formatters.common as fc

import attrdict as ad

def format(entry):
    formatted_entry = ad.AttrDict()
    formatted_entry["_id"] = entry["_id"]
    formatted_entry.user_id = entry.user_id

    metadata = entry.metadata
    if "time_zone" not in metadata:
        metadata.time_zone = "America/Los_Angeles"
    fc.expand_metadata_times(metadata)
    formatted_entry.metadata = metadata

    data = ad.AttrDict()
    if 'agb' in entry.data:
        data.type = ecwa.MotionTypes(entry.data.agb).value
    else:
        data.type = ecwa.MotionTypes(entry.data.zzaEg).value
    if 'agc' in entry.data:
        data.confidence = entry.data.agc
    else:
        data.confidence = entry.data.zzaEh
    data.ts = formatted_entry.metadata.write_ts
    data.local_dt = formatted_entry.metadata.write_local_dt
    data.fmt_time = formatted_entry.metadata.write_fmt_time
    formatted_entry.data = data

    return formatted_entry
Handle the weird field names from the new version of the API
Handle the weird field names from the new version of the API As part of the switch to cordova, we moved to a newer version of the google play API. Unfortunately, this meant that the weird field names for the confidence and type changed to a different set of weird field names. We should really use a standard wrapper class for this on android as well to avoid such silent breakages in the future. https://github.com/e-mission/e-mission-data-collection/issues/80
Python
bsd-3-clause
e-mission/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,joshzarrabi/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server
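The commit message above suggests a standard wrapper to avoid silent breakage when obfuscated field names change. A hypothetical helper in that spirit (the function and the sample field values are assumptions, not the project's actual fix) could replace the per-field if/else:

def first_present(data, *candidates):
    # Return the value of the first candidate key present in a dict-like object.
    for name in candidates:
        if name in data:
            return data[name]
    raise KeyError("none of %s found" % (candidates,))

# Usage against an entry.data-like mapping carrying the newer obfuscated names:
sample = {"zzaEg": 3, "zzaEh": 85}
print(first_present(sample, "agb", "zzaEg"))  # 3  (activity type)
print(first_present(sample, "agc", "zzaEh"))  # 85 (confidence)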
5aa3dbf8f520f9ffaaed51ed397eb9f4c722882a
sample_app/__init__.py
sample_app/__init__.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import ConfigParser

import falcon


class _SimpleResource(object):

    def __init__(self, conf):
        try:
            message = conf.get('sample_app', 'message')
        except ConfigParser.Error:
            message = 'something'
        self._message = message

    def on_get(self, req, resp):
        resp.body = json.dumps({'message': self._message})
        resp.set_header('Content-Type', 'application/json')

    def on_put(self, req, resp):
        doc = json.load(req.stream)
        self._message = doc['message']
        resp.body = json.dumps({'message': self._message})


def make_application():
    conf = ConfigParser.RawConfigParser()
    conf.read(['/etc/sample_app/sample_app.conf'])
    application = falcon.API()
    application.add_route('/', _SimpleResource(conf))
    return application
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ConfigParser
import json

import falcon


class _SimpleResource(object):

    def __init__(self, conf):
        try:
            message = conf.get('sample_app', 'message')
        except ConfigParser.Error:
            message = 'something'
        self._message = message

    def on_get(self, req, resp):
        resp.body = json.dumps({'message': self._message})
        resp.set_header('Content-Type', 'application/json')

    def on_put(self, req, resp):
        doc = json.load(req.stream)
        self._message = doc['message']
        resp.body = json.dumps({'message': self._message})


def make_application():
    conf = ConfigParser.RawConfigParser()
    conf.read(['/etc/sample_app/sample_app.conf'])
    application = falcon.API()
    application.add_route('/', _SimpleResource(conf))
    return application
Fix pep8 order import issue
Fix pep8 order import issue
Python
apache-2.0
brantlk/python-sample-app
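For context, one way to run the application above locally with only the standard library (Python 2, matching the ConfigParser import; the host, port, and import path are assumptions):

from wsgiref.simple_server import make_server

from sample_app import make_application

# Serves GET / -> {"message": "something"} when no conf file is present.
httpd = make_server('127.0.0.1', 8000, make_application())
httpd.serve_forever()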
4146551d191152ae486f079911f53086f3a60a07
api/models.py
api/models.py
from django.db import models
from rest_framework import serializers


class Question(models.Model):
    version = models.CharField(primary_key=True, max_length=8)
    text = models.TextField()
    created_on = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)


class Choice(models.Model):
    text = models.TextField()
    version = models.ForeignKey(Question, on_delete=models.CASCADE)
    created_on = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)


class Answer(models.Model):
    choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
    user_id = models.TextField()
    created_on = models.DateTimeField(auto_now_add=True)


class ChoiceSerializer(serializers.ModelSerializer):
    class Meta:
        model = Choice
        fields = ('id', 'text', 'version', 'created_on', 'updated_on',)


class QuestionSerializer(serializers.ModelSerializer):
    # TODO: create a serializer that returns list of choices for the question
    class Meta:
        model = Question
        fields = ('text', 'version', 'created_on', 'updated_on',)


class AnswerSerializer(serializers.ModelSerializer):
    class Meta:
        model = Answer
        fields = ('id', 'choice_id', 'user_id', 'created_on',)
from django.db import models
from rest_framework import serializers


class Question(models.Model):
    version = models.CharField(primary_key=True, max_length=8)
    text = models.TextField()
    created_on = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)


class Choice(models.Model):
    text = models.TextField()
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    created_on = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)


class Answer(models.Model):
    choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
    user_id = models.TextField()
    created_on = models.DateTimeField(auto_now_add=True)


class ChoiceSerializer(serializers.ModelSerializer):
    class Meta:
        model = Choice
        fields = ('id', 'text', 'version', 'created_on', 'updated_on',)


class QuestionSerializer(serializers.ModelSerializer):
    # TODO: create a serializer that returns list of choices for the question
    class Meta:
        model = Question
        fields = ('text', 'version', 'created_on', 'updated_on',)


class AnswerSerializer(serializers.ModelSerializer):
    class Meta:
        model = Answer
        fields = ('id', 'choice_id', 'user_id', 'created_on',)
Rename version field in Choice to question
Rename version field in Choice to question
Python
mit
holycattle/pysqueak-api,holycattle/pysqueak-api
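One plausible way to resolve the TODO in QuestionSerializer above, nesting each question's choices via the reverse relation (the class name and field layout are assumptions, not the repository's eventual fix):

from rest_framework import serializers

# Assumes the Question model and ChoiceSerializer defined above.
class QuestionWithChoicesSerializer(serializers.ModelSerializer):
    # 'choice_set' is Django's default reverse accessor for Choice.question.
    choices = ChoiceSerializer(many=True, read_only=True, source='choice_set')

    class Meta:
        model = Question
        fields = ('text', 'version', 'choices', 'created_on', 'updated_on',)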
b9d54da73e5c4d859aa5ad8e9d8b96bb7527ae6d
api/models.py
api/models.py
from django.db import models

# Create your models here.
class User(models.Model):
    uid = models.AutoField(primary_key=True)
    uname = models.CharField(max_length=128)
    session = models.CharField(max_length=35,unique=True)

class Box(models.Model):
    bid = models.AutoField(primary_key=True)
    level = models.IntegerField()
    owner = models.ForeignKey("User")

class Party(models.Model):
    uid = models.ForeignKey("User")
    fir = models.ForeignKey("Box",related_name="first")
    sec = models.ForeignKey("Box",related_name="second")
    thi = models.ForeignKey("Box",related_name="third")
    fou = models.ForeignKey("Box",related_name="fourth")

class Monster(models.Model):
    mid = models.AutoField(primary_key=True)
    mname = models.CharField(max_length=128)
    initHP = models.IntegerField()
    initAtk = models.IntegerField()
    groHP = models.FloatField()
    groAtk = models.FloatField()

class Skill(models.Model):
    sid = models.AutoField(primary_key=True)
    target = models.CharField(max_length=40)
    function = models.TextField()
from django.db import models

# Create your models here.
class User(models.Model):
    uid = models.AutoField(primary_key=True)
    uname = models.CharField(max_length=128)
    session = models.CharField(max_length=35,unique=True)

class Box(models.Model):
    bid = models.AutoField(primary_key=True)
    level = models.IntegerField()
    mid = models.ForeignKey("Monster")
    owner = models.ForeignKey("User")

class Party(models.Model):
    uid = models.ForeignKey("User")
    fir = models.ForeignKey("Box",related_name="first")
    sec = models.ForeignKey("Box",related_name="second")
    thi = models.ForeignKey("Box",related_name="third")
    fou = models.ForeignKey("Box",related_name="fourth")

class Position(models.Model):
    uid = models.ForeignKey("User")
    latitude = models.FloatField()
    longitude = models.FloatField()
    time = models.DateField(auto_now=True)

class Monster(models.Model):
    mid = models.AutoField(primary_key=True)
    mname = models.CharField(max_length=128)
    initHP = models.IntegerField()
    initAtk = models.IntegerField()
    groHP = models.FloatField()
    groAtk = models.FloatField()

class Skill(models.Model):
    sid = models.AutoField(primary_key=True)
    target = models.CharField(max_length=40)
    function = models.TextField()
Add position table and add field mid in box tables
Add position table and add field mid in box tables
Python
apache-2.0
g82411/s_square
from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField() Add position table and add field mid in box tables
from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() mid = models.ForeignKey("Monster") owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Position(models.Model): uid = models.ForeignKey("User") latitude = models.FloatField() longitude = models.FloatField() time = models.DateField(auto_now=True) class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField()
<commit_before>from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField() <commit_msg>Add position table and add field mid in box tables<commit_after>
from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() mid = models.ForeignKey("Monster") owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Position(models.Model): uid = models.ForeignKey("User") latitude = models.FloatField() longitude = models.FloatField() time = models.DateField(auto_now=True) class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField()
from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField() Add position table and add field mid in box tablesfrom django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() mid = models.ForeignKey("Monster") owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Position(models.Model): uid = models.ForeignKey("User") latitude = models.FloatField() longitude = models.FloatField() time = models.DateField(auto_now=True) class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField()
<commit_before>from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField() <commit_msg>Add position table and add field mid in box tables<commit_after>from django.db import models # Create your models here. class User(models.Model): uid = models.AutoField(primary_key=True) uname = models.CharField(max_length=128) session = models.CharField(max_length=35,unique=True) class Box(models.Model): bid = models.AutoField(primary_key=True) level = models.IntegerField() mid = models.ForeignKey("Monster") owner = models.ForeignKey("User") class Party(models.Model): uid = models.ForeignKey("User") fir = models.ForeignKey("Box",related_name="first") sec = models.ForeignKey("Box",related_name="second") thi = models.ForeignKey("Box",related_name="third") fou = models.ForeignKey("Box",related_name="fourth") class Position(models.Model): uid = models.ForeignKey("User") latitude = models.FloatField() longitude = models.FloatField() time = models.DateField(auto_now=True) class Monster(models.Model): mid = models.AutoField(primary_key=True) mname = models.CharField(max_length=128) initHP = models.IntegerField() initAtk = models.IntegerField() groHP = models.FloatField() groAtk = models.FloatField() class Skill(models.Model): sid = models.AutoField(primary_key=True) target = models.CharField(max_length=40) function = models.TextField()
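Editor's note: a short usage sketch for the Position model added in the record above. It is a hedged illustration, not part of the commit; the import path game.models is an assumption (the record never names the Django app), and it presumes a configured Django project.

from game.models import Position, User  # hypothetical app path

def record_position(user, lat, lon):
    # DateField(auto_now=True) stamps the row on every save, so only
    # the coordinates need to be supplied explicitly.
    return Position.objects.create(uid=user, latitude=lat, longitude=lon)

def latest_position(user):
    # Most recent fix for this user, or None if nothing is recorded yet.
    return Position.objects.filter(uid=user).order_by("-time").first()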
c28de15fd8cade476fa8d7af904826dcea3c0f3e
python.py
python.py
# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA)
# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) # unit testing with unittest def fun(x): return x + 1 class TestAddingMethod(unittest.TestCase): def test_three(self): self.assertEqual(fun(3), 4)
Add Python note on simple unit test
Add Python note on simple unit test
Python
cc0-1.0
erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes
# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) Add Python note on simple unit test
# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) # unit testing with unittest def fun(x): return x + 1 class TestAddingMethod(unittest.TestCase): def test_three(self): self.assertEqual(fun(3), 4)
<commit_before># Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) <commit_msg>Add Python note on simple unit test<commit_after>
# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) # unit testing with unittest def fun(x): return x + 1 class TestAddingMethod(unittest.TestCase): def test_three(self): self.assertEqual(fun(3), 4)
# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) Add Python note on simple unit test# Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) # unit testing with unittest def fun(x): return x + 1 class TestAddingMethod(unittest.TestCase): def test_three(self): self.assertEqual(fun(3), 4)
<commit_before># Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) <commit_msg>Add Python note on simple unit test<commit_after># Python Notes # Version 2.7 # for loop for i in range(10): print i # check list elements of matching string randList = ["a", "ab", "bc", "de", "abc"] toFind = "a" print [x for x in randList if toFind in x] # read file with open("filename.txt", "r") as fh: data = fh.readline() # read line by line # data = fh.read() # read entire file # reload local package if modified reload(package) # check if file/directory exists import os.path os.path.isfile(FILENAME) # tests specifically files os.path.exists(ANYTHING) # tests files and directories # create generator with yield command def createGenerator(): mylist = range(3) for i in mylist: yield i * i # clone instead of point to a set object setA = set([1, 2, 3, 4]) setB = set(setA) # unit testing with unittest def fun(x): return x + 1 class TestAddingMethod(unittest.TestCase): def test_three(self): self.assertEqual(fun(3), 4)
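The unittest note in this record omits the import and the runner hook, so the snippet will not run on its own. A complete, runnable version of the same note (an illustrative sketch, not the repository's code):

import unittest

def fun(x):
    return x + 1

class TestAddingMethod(unittest.TestCase):
    def test_three(self):
        # assertEqual reports both values on failure, unlike a bare assert.
        self.assertEqual(fun(3), 4)

if __name__ == "__main__":
    unittest.main()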
80cdc54dbe41c243c4620472aa8ba5c6ece40324
etl_framework/DataTable.py
etl_framework/DataTable.py
class DataRow(dict): """object for holding row of data""" def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names)
class DataRow(dict): """object for holding row of data""" def __init__(self, *args, **kwargs): """creates instance of DataRow""" super(DataRow, self).__init__(*args, **kwargs) self.target_table = None def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) def set_target_table(self, target_table): """sets target table attribute""" self.target_table = target_table def get_target_table(self): """returns target table attribute""" return self.target_table class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names)
Add target_table attribute to DataRow
Add target_table attribute to DataRow
Python
mit
pantheon-systems/etl-framework
class DataRow(dict): """object for holding row of data""" def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names) Add target_table attribute to DataRow
class DataRow(dict): """object for holding row of data""" def __init__(self, *args, **kwargs): """creates instance of DataRow""" super(DataRow, self).__init__(*args, **kwargs) self.target_table = None def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) def set_target_table(self, target_table): """sets target table attribute""" self.target_table = target_table def get_target_table(self): """returns target table attribute""" return self.target_table class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names)
<commit_before>class DataRow(dict): """object for holding row of data""" def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names) <commit_msg>Add target_table attribute to DataRow<commit_after>
class DataRow(dict): """object for holding row of data""" def __init__(self, *args, **kwargs): """creates instance of DataRow""" super(DataRow, self).__init__(*args, **kwargs) self.target_table = None def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) def set_target_table(self, target_table): """sets target table attribute""" self.target_table = target_table def get_target_table(self): """returns target table attribute""" return self.target_table class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names)
class DataRow(dict): """object for holding row of data""" def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names) Add target_table attribute to DataRowclass DataRow(dict): """object for holding row of data""" def __init__(self, *args, **kwargs): """creates instance of DataRow""" super(DataRow, self).__init__(*args, **kwargs) self.target_table = None def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) def set_target_table(self, target_table): """sets target table attribute""" self.target_table = target_table def get_target_table(self): """returns target table attribute""" return self.target_table class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names)
<commit_before>class DataRow(dict): """object for holding row of data""" def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names) <commit_msg>Add target_table attribute to DataRow<commit_after>class DataRow(dict): """object for holding row of data""" def __init__(self, *args, **kwargs): """creates instance of DataRow""" super(DataRow, self).__init__(*args, **kwargs) self.target_table = None def row_values(self, field_names, default_value=None): """returns row value of specified field_names""" return tuple(self.get(field_name, default_value) for field_name in field_names) def set_target_table(self, target_table): """sets target table attribute""" self.target_table = target_table def get_target_table(self): """returns target table attribute""" return self.target_table class DataTable(object): """object for holding data""" def __init__(self, data, keys=None): """instantiates Table object with rows(which should be a list of dictionaries)""" self.rows = list(data) #set keys as _keys of first row by default if keys: self._keys = keys else: self._keys = self.rows[0].keys() def keys(self): """returns keys of Table""" return self._keys def append_row(self, row): """adds another row to table""" self.rows.append(row) def iterrows(self, field_names, default_value=None): """generator that yields specified fields for each row""" for row in self.rows: yield tuple(row.get(field_name, default_value) for field_name in field_names)
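A brief usage sketch for the target_table attribute introduced above, assuming the module is importable under the path shown in the record (etl_framework.DataTable); the field values are made up for illustration:

from etl_framework.DataTable import DataRow

row = DataRow({"id": 1, "name": "alpha"})  # DataRow subclasses dict
row.set_target_table("users")

assert row.get_target_table() == "users"
# Missing field names fall back to default_value, which defaults to None.
assert row.row_values(("id", "age")) == (1, None)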
837b63918c29c1cd45a2a0daf8e6ff6e3b28bfb7
merc/features/ts6/sid.py
merc/features/ts6/sid.py
from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description))
from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! pass @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description))
Fix typo breaking the TS6 feature.
Fix typo breaking the TS6 feature.
Python
mit
merc-devel/merc
from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description)) Fix typo breaking the TS6 feature.
from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! pass @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description))
<commit_before>from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description)) <commit_msg>Fix typo breaking the TS6 feature.<commit_after>
from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! pass @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description))
from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description)) Fix typo breaking the TS6 feature.from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! pass @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description))
<commit_before>from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description)) <commit_msg>Fix typo breaking the TS6 feature.<commit_after>from merc import errors from merc import feature from merc import message from merc import util class SidFeature(feature.Feature): NAME = __name__ install = SidFeature.install @SidFeature.register_server_command class Sid(message.Command): NAME = "SID" MIN_ARITY = 4 def __init__(self, server_name, hopcount, sid, description, *args): self.server_name = server_name self.hopcount = hopcount self.sid = sid self.description = description def as_command_params(self): return [self.server_name, self.hopcount, self.sid, self.description] def handle_for(self, app, server, prefix): # TODO: handle me! pass @SidFeature.hook("network.burst.sid") def burst_sids(app, server): for source, target in app.network.all_links(): server.send(source.sid, Sid(target.name, "1", target.sid, target.description))
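The "typo" this commit fixes is a def whose body is only a comment, which Python rejects at compile time; adding pass, as the new file does, makes the suite non-empty. A self-contained demonstration of that failure mode:

broken = (
    "def handle_for(self, app, server, prefix):\n"
    "    # TODO: handle me!\n"
)
try:
    compile(broken, "<sid>", "exec")
except SyntaxError as exc:
    print("broken:", exc.msg)  # e.g. "expected an indented block"

compile(broken + "    pass\n", "<sid>", "exec")  # compiles once pass is added
print("fixed: ok")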
cad48e91776ded810b23c336380797c88dd456c0
services/netflix.py
services/netflix.py
import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, redirect_uri): params = super(Netflix, self).get_authorize_params(redirect_uri) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id']
import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, *args, **kwargs): params = super(Netflix, self).get_authorize_params(*args, **kwargs) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id']
Fix Netflix in light of the new scope selection system
Fix Netflix in light of the new scope selection system
Python
bsd-3-clause
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, redirect_uri): params = super(Netflix, self).get_authorize_params(redirect_uri) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id'] Fix Netflix in light of the new scope selection system
import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, *args, **kwargs): params = super(Netflix, self).get_authorize_params(*args, **kwargs) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id']
<commit_before>import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, redirect_uri): params = super(Netflix, self).get_authorize_params(redirect_uri) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id'] <commit_msg>Fix Netflix in light of the new scope selection system<commit_after>
import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, *args, **kwargs): params = super(Netflix, self).get_authorize_params(*args, **kwargs) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id']
import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, redirect_uri): params = super(Netflix, self).get_authorize_params(redirect_uri) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id'] Fix Netflix in light of the new scope selection systemimport urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, *args, **kwargs): params = super(Netflix, self).get_authorize_params(*args, **kwargs) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id']
<commit_before>import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, redirect_uri): params = super(Netflix, self).get_authorize_params(redirect_uri) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id'] <commit_msg>Fix Netflix in light of the new scope selection system<commit_after>import urlparse import foauth.providers from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY class Netflix(foauth.providers.OAuth1): # General info about the provider provider_url = 'https://www.netflix.com/' docs_url = 'http://developer.netflix.com/docs' # URLs to interact with the API request_token_url = 'http://api.netflix.com/oauth/request_token' authorize_url = 'https://api-user.netflix.com/oauth/login' access_token_url = 'http://api.netflix.com/oauth/access_token' api_domains = ['api-public.netflix.com', 'api.netflix.com'] available_permissions = [ (None, 'read and manage your queue'), ] https = False signature_type = SIGNATURE_TYPE_QUERY def get_authorize_params(self, *args, **kwargs): params = super(Netflix, self).get_authorize_params(*args, **kwargs) params['oauth_consumer_key'] = self.client_id return params def get_user_id(self, key): r = self.api(key, self.api_domains[0], u'/users/current', params={'output': 'json'}) redirect = r.json[u'resource'][u'link'][u'href'] parts = urlparse.urlparse(redirect) r = self.api(key, parts.netloc, parts.path, params={'output': 'json'}) return r.json[u'user'][u'user_id']
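The fix above replaces the explicit redirect_uri parameter with *args/**kwargs so the override keeps forwarding whatever the new scope-selection code passes to the base method. A framework-free sketch of that pattern; the class and parameter names here are illustrative, not foauth's API:

class Base:
    def get_authorize_params(self, redirect_uri, scopes=()):
        return {"redirect_uri": redirect_uri, "scope": " ".join(scopes)}

class Provider(Base):
    client_id = "abc123"  # stand-in value

    def get_authorize_params(self, *args, **kwargs):
        # Forward everything untouched and add only this provider's key.
        params = super().get_authorize_params(*args, **kwargs)
        params["oauth_consumer_key"] = self.client_id
        return params

print(Provider().get_authorize_params("https://example.com/cb", scopes=("read",)))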
6eae9369cc122b23577715951d6b0f59991b0f65
saleor/csv/__init__.py
saleor/csv/__init__.py
class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain csv file."), (XLSX, "Excel .xlsx file."), ]
class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain CSV file."), (XLSX, "Excel XLSX file."), ]
Update choices descriptions in FileTypes
Update choices descriptions in FileTypes
Python
bsd-3-clause
mociepka/saleor,mociepka/saleor,mociepka/saleor
class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain csv file."), (XLSX, "Excel .xlsx file."), ] Update choices descriptions in FileTypes
class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain CSV file."), (XLSX, "Excel XLSX file."), ]
<commit_before>class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain csv file."), (XLSX, "Excel .xlsx file."), ] <commit_msg>Update choices descriptions in FileTypes<commit_after>
class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain CSV file."), (XLSX, "Excel XLSX file."), ]
class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain csv file."), (XLSX, "Excel .xlsx file."), ] Update choices descriptions in FileTypesclass ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain CSV file."), (XLSX, "Excel XLSX file."), ]
<commit_before>class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain csv file."), (XLSX, "Excel .xlsx file."), ] <commit_msg>Update choices descriptions in FileTypes<commit_after>class ExportEvents: """The different csv events types.""" EXPORT_PENDING = "export_pending" EXPORT_SUCCESS = "export_success" EXPORT_FAILED = "export_failed" EXPORT_DELETED = "export_deleted" EXPORTED_FILE_SENT = "exported_file_sent" CHOICES = [ (EXPORT_PENDING, "Data export was started."), (EXPORT_SUCCESS, "Data export was completed successfully."), (EXPORT_FAILED, "Data export failed."), (EXPORT_DELETED, "Export file was started."), ( EXPORTED_FILE_SENT, "Email with link to download csv file was sent to the customer.", ), ] class FileTypes: CSV = "csv" XLSX = "xlsx" CHOICES = [ (CSV, "Plain CSV file."), (XLSX, "Excel XLSX file."), ]
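The CHOICES lists in this record follow Django's (stored value, human label) convention. A minimal sketch of how such pairs are typically consumed, using a copy of the class so it runs standalone:

class FileTypes:
    CSV = "csv"
    XLSX = "xlsx"
    CHOICES = [
        (CSV, "Plain CSV file."),
        (XLSX, "Excel XLSX file."),
    ]

labels = dict(FileTypes.CHOICES)
print(labels[FileTypes.CSV])   # Plain CSV file.
print(labels[FileTypes.XLSX])  # Excel XLSX file.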
b6f04f32556fc8251566212c56159dcfff7bf596
pi_approach/Distance_Pi/distance.py
pi_approach/Distance_Pi/distance.py
# Lidar Project Distance Subsystem import serial import socket import time import sys sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries") import serverxclient as cli arduino_dist = serial.Serial('/dev/ttyUSB0',9600) client = cli.Client() class distance_controller(object): """An all-powerful distance-finding controller""" def get_distance(self): distance = arduino_dist.readline() return distance def setup_handshake(self): connected = False while not connected: try: client.socket_connection() connected = True except: print "Failure" time.sleep(2) received_communication = client.receive_data() if received_communication == "VERIFY?": hand_shake = "DISTANCE!" client.send_data(hand_shake) else: print "Unidentified communication" def active_listen(self): received_communication = client.receive_data() if received_communication == "FIRE": result = self.get_distance() try: test_int = int(result) print result client.send_data(result) except: print "Unexpected character" def main(self): self.setup_handshake() while True: self.active_listen() distance = distance_controller() distance.main()
# Lidar Project Distance Subsystem import serial import socket import time import sys sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries") import serverxclient as cli arduino_dist = serial.Serial('/dev/ttyUSB0',9600) client = cli.Client() class distance_controller(object): """An all-powerful distance-finding controller""" def get_distance(self): distance = arduino_dist.readline() return distance def setup_handshake(self): connected = False while not connected: try: client.socket_connection() connected = True except: print "Failure" time.sleep(2) received_communication = client.receive_data() if received_communication == "VERIFY?": hand_shake = "DISTANCE!" client.send_data(hand_shake) else: print "Unidentified communication" def active_listen(self): received_communication = client.receive_data() if received_communication == "FIRE": result = self.get_distance() try: test_int = int(result) print result client.send_data(result) except: print "Unexpected character" client.send_data("0") def main(self): self.setup_handshake() while True: self.active_listen() distance = distance_controller() distance.main()
Add unexpected character bug fix
Add unexpected character bug fix
Python
mit
the-raspberry-pi-guy/lidar
# Lidar Project Distance Subsystem import serial import socket import time import sys sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries") import serverxclient as cli arduino_dist = serial.Serial('/dev/ttyUSB0',9600) client = cli.Client() class distance_controller(object): """An all-powerful distance-finding controller""" def get_distance(self): distance = arduino_dist.readline() return distance def setup_handshake(self): connected = False while not connected: try: client.socket_connection() connected = True except: print "Failure" time.sleep(2) received_communication = client.receive_data() if received_communication == "VERIFY?": hand_shake = "DISTANCE!" client.send_data(hand_shake) else: print "Unidentified communication" def active_listen(self): received_communication = client.receive_data() if received_communication == "FIRE": result = self.get_distance() try: test_int = int(result) print result client.send_data(result) except: print "Unexpected character" def main(self): self.setup_handshake() while True: self.active_listen() distance = distance_controller() distance.main() Add unexpected character bug fix
# Lidar Project Distance Subsystem import serial import socket import time import sys sys.path.insert(0, "/home/pi/lidar/pi_approach/Libraries") import serverxclient as cli arduino_dist = serial.Serial('/dev/ttyUSB0',9600) client = cli.Client() class distance_controller(object): """An all-powerful distance-finding controller""" def get_distance(self): distance = arduino_dist.readline() return distance def setup_handshake(self): connected = False while not connected: try: client.socket_connection() connected = True except: print "Failure" time.sleep(2) received_communication = client.receive_data() if received_communication == "VERIFY?": hand_shake = "DISTANCE!" client.send_data(hand_shake) else: print "Unidentified communication" def active_listen(self): received_communication = client.receive_data() if received_communication == "FIRE": result = self.get_distance() try: test_int = int(result) print result client.send_data(result) except: print "Unexpected character" client.send_data("0") def main(self): self.setup_handshake() while True: self.active_listen() distance = distance_controller() distance.main()
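The fix above guards the serial read: when the Arduino returns a line that cannot be parsed as an integer, the subsystem now sends "0" instead of staying silent, so the peer waiting on the socket is never left blocked. A minimal, self-contained sketch of the same validate-or-fall-back pattern (written for Python 3, unlike the Python 2 subsystem above; the function name and sample values are illustrative, not taken from the repository):

def read_distance(raw_line, send):
    """Forward a sensor reading, falling back to "0" on garbage input."""
    try:
        int(raw_line)      # validate before trusting the serial data
        send(raw_line)
    except ValueError:
        send("0")          # keep the protocol moving on a bad read

read_distance("123\n", print)   # forwards the reading
read_distance("1x3", print)     # falls back to "0"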
069e98f036c77f635a955ea2c48580709089e702
src/conference_scheduler/resources.py
src/conference_scheduler/resources.py
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


class Event(NamedTuple):
    name: str
    duration: int
    demand: int
    tags: List[str] = []
    unavailability: List = []


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


class BaseEvent(NamedTuple):
    name: str
    duration: int
    demand: int
    tags: List[str]
    unavailability: List


class Event(BaseEvent):
    __slots__ = ()

    def __new__(cls, name, duration, demand, tags=None, unavailability=None):
        if tags is None:
            tags = []
        if unavailability is None:
            unavailability = []
        return super().__new__(
            cls, name, duration, demand, tags, unavailability
        )


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
Set default values for `tags` and `availability`
Python
mit
PyconUK/ConferenceScheduler
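The commit message does not spell out the motivation for replacing the `= []` annotations with a `__new__` override, but one concrete problem with class-level mutable defaults on a NamedTuple is that every instance shares the single default list, while the `tags=None` sentinel gives each instance a fresh one. A short demonstration (class name is invented for the demo):

from typing import NamedTuple, List

class WithDefault(NamedTuple):
    tags: List[str] = []        # one list object reused by every instance

a = WithDefault()
b = WithDefault()
a.tags.append("science")
print(b.tags)                   # ['science'] -- b sees a's mutation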
1112f3602c147f469c21181c5c61d480b3f2ed75
opps/api/views/generic/list.py
opps/api/views/generic/list.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sites.models import get_current_site
from django.utils import timezone

from rest_framework.generics import ListAPIView as RestListAPIView

from opps.views.generic.base import View
from opps.containers.models import ContainerBox


class ListView(View, RestListAPIView):

    def get_queryset(self):
        self.long_slug = self.get_long_slug()
        self.site = get_current_site(self.request)

        if not self.long_slug:
            return None

        self.set_channel_rules()

        self.articleboxes = ContainerBox.objects.filter(
            channel__long_slug=self.long_slug)

        for box in self.articleboxes:
            self.excluded_ids.update([a.pk for a in box.ordered_containers()])

        queryset = super(ListView, self).get_queryset()
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['channel_long_slug__in'] = self.channel_long_slug
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        filters['show_on_root_channel'] = True
        queryset = queryset.filter(**filters).exclude(pk__in=self.excluded_ids)

        return queryset._clone()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sites.models import get_current_site
from django.utils import timezone

from rest_framework.generics import ListAPIView as RestListAPIView

from opps.views.generic.base import View
from opps.containers.models import ContainerBox


class ListView(View, RestListAPIView):

    def get_queryset(self):
        self.long_slug = self.get_long_slug()
        self.site = get_current_site(self.request)

        if not self.long_slug:
            return None

        self.set_channel_rules()

        self.articleboxes = ContainerBox.objects.filter(
            channel__long_slug=self.long_slug)

        for box in self.articleboxes:
            self.excluded_ids.update([a.pk for a in box.ordered_containers()])

        queryset = super(ListView, self).get_queryset()
        filters = {}
        filters['site_domain'] = self.site.domain
        try:
            if queryset.model._meta.get_field_by_name('channel_long_slug'):
                filters['channel_long_slug__in'] = self.channel_long_slug
        except:
            pass
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        queryset = queryset.filter(**filters).exclude(pk__in=self.excluded_ids)

        return queryset._clone()
Fix missing channel_long_slug field on API access
Python
mit
YACOWS/opps,williamroot/opps,jeanmask/opps,opps/opps
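The replacement wraps the channel filter in a broad try/except around `_meta.get_field_by_name`, a Meta API from older Django releases. On Django 1.8 and later the same capability check can be written more narrowly; a sketch under that assumption (helper name is hypothetical):

from django.core.exceptions import FieldDoesNotExist

def has_field(model, name):
    """Return True if the model defines the given field (Django >= 1.8 API)."""
    try:
        model._meta.get_field(name)
        return True
    except FieldDoesNotExist:
        return False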
6b8f66ed0bcaa62b3afd9fea7d749916d768847d
scripts/midnightRun.py
scripts/midnightRun.py
from recover.models import *
from recover.patient_data import *
from datetime import date


def midnightRun():
    physicians = User.objects()
    for physician in physicians:
        patients = physician.patients
        for patient in patients:
            data = PatientData(patient)
            last_synced = patient.date_last_synced.isoformat()
            last_synced = last_synced[0:10]
            data.get_heart_rate_data_for_date_range(last_synced)
            data.get_activity_data_for_date_range(last_synced)
from recover.models import *
from recover.patient_data import *
import datetime


def midnightRun():
    physicians = User.objects()
    for physician in physicians:
        patients = physician.patients
        for patient in patients:
            data = PatientData(patient)
            last_synced = patient.date_last_synced.isoformat()
            last_synced = last_synced[0:10]
            data.get_heart_rate_data_for_date_range(last_synced)
            data.get_activity_data_for_date_range(last_synced)
            patient.date_last_synced = datetime.datetime.now()
Update 'date_last_synced' field on each patient after midnight fetching
Python
mit
SLU-Capstone/Recover
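Note that the new line only updates the document in memory; nothing in the shown diff persists it. If these are MongoEngine-style documents (which the `User.objects()` call style suggests, though the record does not say), an explicit `save()` would also be needed, roughly:

import datetime

def mark_synced(patient):
    """Stamp and persist the sync time (save() is an assumption about the ORM)."""
    patient.date_last_synced = datetime.datetime.now()
    patient.save()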
f5bb9e5f388c4ac222da2318638266fdfbe925f0
beam/vendor.py
beam/vendor.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six


@six.python_2_unicode_compatible
class Vendor(object):
    """
    Represents a VPS provider.
    """

    def __init__(self, name, endpoint):
        """
        Initialise a new vendor object.

        :param name: The name of the vendor, e.g. "RamNode".
        :param endpoint: The hostname of the SolusVM control panel,
                         with protocol.
        """
        self.name = name
        self.endpoint = endpoint

    def __hash__(self):
        """
        Retrieve a hash value for this object.

        :return: This object's hash. Identical objects will have an
                 identical hash.
        """
        return hash(self.name)

    def __eq__(self, other):
        """
        Test whether this vendor is identical to another.

        :param other: The object to compare to this one.
        :return: True if the objects are identical, false otherwise.
        """
        return isinstance(other, self.__class__) and other.name == self.name

    def __str__(self):
        """
        Generate a human-readable string representation of this vendor.

        :return: This host as a friendly string.
        """
        return '{0}({1}, {2})'.format(self.__class__.__name__,
                                      self.name,
                                      self.endpoint)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six


@six.python_2_unicode_compatible
class Vendor(object):
    """
    Represents a VPS provider.
    """

    def __init__(self, name, endpoint):
        """
        Initialise a new vendor object.

        :param name: The name of the vendor, e.g. "RamNode".
        :param endpoint: The hostname of the SolusVM control panel,
                         with protocol.
        """
        self.name = name
        """ The vendor's name, e.g. "RamNode". """

        self.endpoint = endpoint
        """ The hostname of the SolusVM control panel, with protocol. """

    def __hash__(self):
        """
        Retrieve a hash value for this object.

        :return: This object's hash. Identical objects will have an
                 identical hash.
        """
        return hash(self.name)

    def __eq__(self, other):
        """
        Test whether this vendor is identical to another.

        :param other: The object to compare to this one.
        :return: True if the objects are identical, false otherwise.
        """
        return isinstance(other, self.__class__) and other.name == self.name

    def __str__(self):
        """
        Generate a human-readable string representation of this vendor.

        :return: This host as a friendly string.
        """
        return '{0}({1}, {2})'.format(self.__class__.__name__,
                                      self.name,
                                      self.endpoint)
Add documentation to Vendor properties
Python
mit
gebn/beam
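Because `__hash__` and `__eq__` both key on `name` alone, two `Vendor` objects with the same name but different endpoints compare equal and collapse to one entry in a set. A quick check of that contract, assuming the package above is importable; the endpoint URLs are made up for illustration:

from beam.vendor import Vendor

v1 = Vendor('RamNode', 'https://vpscp.example.com')
v2 = Vendor('RamNode', 'https://backup.example.com')

assert v1 == v2                 # equality ignores the endpoint
assert len({v1, v2}) == 1       # hashing agrees with equality
print(v1)                       # Vendor(RamNode, https://vpscp.example.com)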
3793ef9014b72d5f48e7df0e91521cfdb4b06134
pycroft/lib/infrastructure.py
pycroft/lib/infrastructure.py
from pycroft.lib.logging import log_room_event, log_event
from pycroft.model.host import SwitchPort, Host, Switch
from pycroft.model.port import PatchPort
from pycroft.model.session import with_transaction, session
from pycroft.model.user import User


@with_transaction
def edit_port_relation(switchport, patchport, switchport_name,
                       patchport_name, room, processor):
    if patchport.room != room:
        log_room_event("Changed room of SP {} → PP {} from {} to {}."
                       .format(switchport.name, patchport.name,
                               patchport.room.short_name, room.short_name),
                       processor, switchport.switch.host.room)
        patchport.room = room

    if switchport.name != switchport_name or patchport.name != patchport_name:
        log_room_event("Changed relation SP {} → PP {} to {} → {} on {}."
                       .format(switchport.name, patchport.name,
                               switchport_name, patchport_name,
                               switchport.switch.name),
                       processor, switchport.switch.host.room)
        switchport.name = switchport_name
        patchport.name = patchport_name


@with_transaction
def create_port_relation(switch, switchport_name, patchport_name, room,
                         processor):
    switchport = SwitchPort(name=switchport_name, switch=switch)
    patchport = PatchPort(name=patchport_name, room=room,
                          switch_port=switchport)

    log_room_event("Created relation SP {} → PP {} on {}."
                   .format(switchport.name, patchport.name,
                           switchport.switch.name),
                   processor, switchport.switch.host.room)


@with_transaction
def delete_port_relation(switchport, patchport, processor):
    log_room_event("Deleted relation SP {} → PP {} on {}."
                   .format(switchport.name, patchport.name,
                           switchport.switch.name),
                   processor, switchport.switch.host.room)

    session.delete(patchport)
    session.delete(switchport)
Add lib functions to add/edit/delete port relations
Python
apache-2.0
agdsn/pycroft,lukasjuhrich/pycroft
c4b8cce856777b08a8ffd5a85567389102aea2c2
qregexeditor/api/quick_ref.py
qregexeditor/api/quick_ref.py
""" Contains the quick reference widget """ from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self)
""" Contains the quick reference widget """ import re from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) self._fix_default_font_size() def _fix_default_font_size(self): # remove fixed font size to allow the user to zoom in/out using # Ctrl+Mouse Wheel # Note: Zooming into HTML documents only works if the font-size is not # set to a fixed size. # (source: http://qt-project.org/doc/qt-5/qtextedit.html) html = self.ui.textEditQuickRef.toHtml() html = re.sub('font-size:\d+pt;', '', html) self.ui.textEditQuickRef.setHtml(html)
Allow the user to zoom in/out the quick reference text using Ctrl+Mouse Wheel
Allow the user to zoom in/out the quick reference text using Ctrl+Mouse Wheel

See issue #1
Python
mit
ColinDuquesnoy/QRegexEditor
""" Contains the quick reference widget """ from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) Allow the user to zoom in/out the quick reference text using Ctrl+Mouse Wheel See issue #1
""" Contains the quick reference widget """ import re from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) self._fix_default_font_size() def _fix_default_font_size(self): # remove fixed font size to allow the user to zoom in/out using # Ctrl+Mouse Wheel # Note: Zooming into HTML documents only works if the font-size is not # set to a fixed size. # (source: http://qt-project.org/doc/qt-5/qtextedit.html) html = self.ui.textEditQuickRef.toHtml() html = re.sub('font-size:\d+pt;', '', html) self.ui.textEditQuickRef.setHtml(html)
<commit_before>""" Contains the quick reference widget """ from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) <commit_msg>Allow the user to zoom in/out the quick reference text using Ctrl+Mouse Wheel See issue #1<commit_after>
""" Contains the quick reference widget """ import re from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) self._fix_default_font_size() def _fix_default_font_size(self): # remove fixed font size to allow the user to zoom in/out using # Ctrl+Mouse Wheel # Note: Zooming into HTML documents only works if the font-size is not # set to a fixed size. # (source: http://qt-project.org/doc/qt-5/qtextedit.html) html = self.ui.textEditQuickRef.toHtml() html = re.sub('font-size:\d+pt;', '', html) self.ui.textEditQuickRef.setHtml(html)
""" Contains the quick reference widget """ from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) Allow the user to zoom in/out the quick reference text using Ctrl+Mouse Wheel See issue #1""" Contains the quick reference widget """ import re from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) self._fix_default_font_size() def _fix_default_font_size(self): # remove fixed font size to allow the user to zoom in/out using # Ctrl+Mouse Wheel # Note: Zooming into HTML documents only works if the font-size is not # set to a fixed size. # (source: http://qt-project.org/doc/qt-5/qtextedit.html) html = self.ui.textEditQuickRef.toHtml() html = re.sub('font-size:\d+pt;', '', html) self.ui.textEditQuickRef.setHtml(html)
<commit_before>""" Contains the quick reference widget """ from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) <commit_msg>Allow the user to zoom in/out the quick reference text using Ctrl+Mouse Wheel See issue #1<commit_after>""" Contains the quick reference widget """ import re from pyqode.qt import QtWidgets from .forms import quick_ref_ui class QuickRefWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(QuickRefWidget, self).__init__(parent) self.ui = quick_ref_ui.Ui_Form() self.ui.setupUi(self) self._fix_default_font_size() def _fix_default_font_size(self): # remove fixed font size to allow the user to zoom in/out using # Ctrl+Mouse Wheel # Note: Zooming into HTML documents only works if the font-size is not # set to a fixed size. # (source: http://qt-project.org/doc/qt-5/qtextedit.html) html = self.ui.textEditQuickRef.toHtml() html = re.sub('font-size:\d+pt;', '', html) self.ui.textEditQuickRef.setHtml(html)
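The `re.sub` in the new `_fix_default_font_size` is easy to exercise away from Qt: it simply deletes every fixed `font-size:NNpt;` declaration from the exported HTML so the widget's zoom shortcuts take effect. A standalone demo with an invented HTML fragment (a raw string is used here to avoid the `\d` escape warning on newer Pythons):

import re

html = '<p style=" font-size:9pt; font-weight:600;">Quick reference</p>'
print(re.sub(r'font-size:\d+pt;', '', html))
# -> <p style=" font-weight:600;">Quick reference</p>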
d16988174f5570334b6b3986dbd0b35148566a62
opps/flatpages/models.py
opps/flatpages/models.py
# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass
# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) short_url = models.URLField( _("Short URL"), null=True, blank=False, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def get_absolute_url(self): return "/page/{0}".format(self.slug) get_absolute_url.short_description = 'URL' def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass
Add field short_url on flatpages model
Add field short_url on flatpages model
Python
mit
opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps
# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass Add field short_url on flatpages model
# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) short_url = models.URLField( _("Short URL"), null=True, blank=False, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def get_absolute_url(self): return "/page/{0}".format(self.slug) get_absolute_url.short_description = 'URL' def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass
<commit_before># -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass <commit_msg>Add field short_url on flatpages model<commit_after>
# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) short_url = models.URLField( _("Short URL"), null=True, blank=False, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def get_absolute_url(self): return "/page/{0}".format(self.slug) get_absolute_url.short_description = 'URL' def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass
# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass Add field short_url on flatpages model# -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) short_url = models.URLField( _("Short URL"), null=True, blank=False, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def get_absolute_url(self): return "/page/{0}".format(self.slug) get_absolute_url.short_description = 'URL' def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass
<commit_before># -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass <commit_msg>Add field short_url on flatpages model<commit_after># -*- coding: utf-8 -*- from django.db import models from django.utils.translation import ugettext_lazy as _ from googl.short import GooglUrlShort from opps.core.models import Publishable, BaseConfig class FlatPage(Publishable): title = models.CharField(_(u"Title"), max_length=140, db_index=True) headline = models.TextField(_(u"Headline"), blank=True, null=True) slug = models.SlugField( _(u"URL"), db_index=True, max_length=150, unique=True, ) short_url = models.URLField( _("Short URL"), null=True, blank=False, ) show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False) main_image = models.ForeignKey( 'images.Image', null=True, blank=True, on_delete=models.SET_NULL, verbose_name=_(u'Main Image'), ) content = models.TextField(_(u"Content")) order = models.IntegerField(_(u"Order"), default=0) def get_absolute_url(self): return "/page/{0}".format(self.slug) get_absolute_url.short_description = 'URL' def __unicode__(self): return u"{0} - {1}".format(self.site.name, self.slug) class FlatPageConfig(BaseConfig): """ Default implementation """ pass
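The flatpages record hard-codes its URL in get_absolute_url, which works but silently diverges if the urlconf ever changes. The customary Django spelling resolves the route by name instead. A minimal sketch, assuming a URL pattern named 'flatpage_detail' that takes a 'slug' keyword — both names are illustrative, not taken from the opps project, and on Django of that era the import would be django.core.urlresolvers.reverse rather than django.urls.reverse.

from django.db import models
from django.urls import reverse

class FlatPage(models.Model):  # simplified; the original extends Publishable
    slug = models.SlugField(max_length=150, unique=True)

    def get_absolute_url(self):
        # Resolve the route through the urlconf instead of hard-coding
        # '/page/<slug>'; 'flatpage_detail' and its 'slug' kwarg are
        # assumed names.
        return reverse('flatpage_detail', kwargs={'slug': self.slug})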
00bb631437fdf45c7a067da43aa042f8b1f6ef8e
osf_models/models/tag.py
osf_models/models/tag.py
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False)
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
Add unique together on _id and system
Add unique together on _id and system
Python
apache-2.0
Nesiehr/osf.io,adlius/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,crcresearch/osf.io,caneruguz/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,baylee-d/osf.io,leb2dg/osf.io,acshi/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,chrisseto/osf.io,chennan47/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,aaxelb/osf.io,chennan47/osf.io,caneruguz/osf.io,binoculars/osf.io,Nesiehr/osf.io,leb2dg/osf.io,caseyrollins/osf.io,adlius/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,TomBaxter/osf.io,erinspace/osf.io,sloria/osf.io,crcresearch/osf.io,binoculars/osf.io,felliott/osf.io,laurenrevere/osf.io,baylee-d/osf.io,leb2dg/osf.io,hmoco/osf.io,cwisecarver/osf.io,aaxelb/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,mfraezz/osf.io,mattclark/osf.io,binoculars/osf.io,mluo613/osf.io,icereval/osf.io,aaxelb/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,hmoco/osf.io,mfraezz/osf.io,cslzchen/osf.io,aaxelb/osf.io,alexschiller/osf.io,TomBaxter/osf.io,mluo613/osf.io,mluo613/osf.io,brianjgeiger/osf.io,icereval/osf.io,acshi/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,sloria/osf.io,pattisdr/osf.io,mfraezz/osf.io,pattisdr/osf.io,mluo613/osf.io,brianjgeiger/osf.io,acshi/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,cslzchen/osf.io,icereval/osf.io,adlius/osf.io,sloria/osf.io,TomBaxter/osf.io,hmoco/osf.io,acshi/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,mfraezz/osf.io,felliott/osf.io,hmoco/osf.io,caneruguz/osf.io,mluo613/osf.io,acshi/osf.io,Johnetordoff/osf.io,erinspace/osf.io,cwisecarver/osf.io,saradbowman/osf.io,saradbowman/osf.io,Nesiehr/osf.io,chrisseto/osf.io,felliott/osf.io,caseyrollins/osf.io,chennan47/osf.io,cslzchen/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) Add unique together on _id and system
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
<commit_before>from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) <commit_msg>Add unique together on _id and system<commit_after>
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) Add unique together on _id and systemfrom django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
<commit_before>from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) <commit_msg>Add unique together on _id and system<commit_after>from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
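For context on the tag record: unique_together still works, but Django's documentation (2.2 onward) steers new code toward UniqueConstraint, which expresses the same guarantee as a named constraint. A sketch of the equivalent declaration — the constraint name and the plain models.Model base are illustrative simplifications, since the original inherits from the project's BaseModel.

from django.db import models

class Tag(models.Model):
    _id = models.CharField(max_length=1024)
    lower = models.CharField(max_length=1024)
    system = models.BooleanField(default=False)

    class Meta:
        constraints = [
            # Same guarantee as unique_together = ('_id', 'system'),
            # spelled as a named constraint.
            models.UniqueConstraint(fields=['_id', 'system'],
                                    name='tag_id_system_uniq'),
        ]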
e203f76177de390fb02a2770499679c099c4f87c
cacheops/__init__.py
cacheops/__init__.py
VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support from .utils import debug_cache_key # noqa class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig'
VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig'
Remove debug_cache_key from entry point
Remove debug_cache_key from entry point
Python
bsd-3-clause
Suor/django-cacheops,LPgenerator/django-cacheops
VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support from .utils import debug_cache_key # noqa class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig' Remove debug_cache_key from entry point
VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig'
<commit_before>VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support from .utils import debug_cache_key # noqa class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig' <commit_msg>Remove debug_cache_key from entry point<commit_after>
VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig'
VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support from .utils import debug_cache_key # noqa class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig' Remove debug_cache_key from entry pointVERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig'
<commit_before>VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support from .utils import debug_cache_key # noqa class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig' <commit_msg>Remove debug_cache_key from entry point<commit_after>VERSION = (3, 0, 1) __version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2])) from django.apps import AppConfig from .simple import * from .query import * from .invalidation import * from .templatetags.cacheops import * from .transaction import install_cacheops_transaction_support class CacheopsConfig(AppConfig): name = 'cacheops' def ready(self): install_cacheops() install_cacheops_transaction_support() default_app_config = 'cacheops.CacheopsConfig'
a2ee2f617ae561ce5b905a6c3960371ce6f157e3
tests/test_open.py
tests/test_open.py
#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = self.WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
Fix 'open reference' test case
Fix 'open reference' test case
Python
mit
caleb531/youversion-suggest,caleb531/youversion-suggest
#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = self.WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17') Fix 'open reference' test case
#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
<commit_before>#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = self.WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17') <commit_msg>Fix 'open reference' test case<commit_after>
#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = self.WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17') Fix 'open reference' test case#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
<commit_before>#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = self.WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17') <commit_msg>Fix 'open reference' test case<commit_after>#!/usr/bin/env python import unittest import yv_suggest.open as yvs import inspect class WebbrowserMock(object): '''mock the builtin webbrowser module''' def open(self, url): '''mock the webbrowser.open() function''' self.url = url class OpenTestCase(unittest.TestCase): '''test the handling of Bible reference URLs''' def test_url(self): '''should build correct URL to Bible reference''' url = yvs.get_ref_url('esv/jhn.3.16') self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16') def test_query_param(self): '''should use received query parameter as default ref ID''' spec = inspect.getargspec(yvs.main) default_query_str = spec.defaults[0] self.assertEqual(default_query_str, '{query}') def test_url_open(self): '''should attempt to open URL using webbrowser module''' mock = WebbrowserMock() yvs.webbrowser = mock yvs.main('nlt/jhn.3.17') self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
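The fix above removes a stray self. from a hand-rolled mock class; the standard library's unittest.mock (shipped since Python 3.3, available to Python 2 as the external mock package) covers the same need without a helper class. A sketch of an equivalent test body, assuming — as the original WebbrowserMock does — that yvs.main() ends up calling webbrowser.open(url) with the URL as a positional argument.

from unittest import mock

import yv_suggest.open as yvs

def test_url_open_with_patch():
    # Temporarily replace the module-level webbrowser reference, then
    # assert on the URL it was asked to open.
    with mock.patch.object(yvs, 'webbrowser') as browser:
        yvs.main('nlt/jhn.3.17')
        browser.open.assert_called_once_with(
            'https://www.bible.com/bible/nlt/jhn.3.17')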
c2d681f0df11d2111fe1ade63a0c045f9c9ebad7
aws_profile.py
aws_profile.py
#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
Fix profile script to correctly use the credentials file
Fix profile script to correctly use the credentials file
Python
mit
mivok/tools,mivok/tools,mivok/tools,mivok/tools
#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True) Fix profile script to correctly use the credentials file
#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
<commit_before>#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True) <commit_msg>Fix profile script to correctly use the credentials file<commit_after>
#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True) Fix profile script to correctly use the credentials file#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
<commit_before>#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/config')) section = sys.argv[1] cmd = sys.argv[2:] if section != 'default': section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True) <commit_msg>Fix profile script to correctly use the credentials file<commit_after>#!/usr/bin/env python # Reads a profile from ~/.aws/config and calls the command with # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set correctly. import ConfigParser import os import subprocess import sys c = ConfigParser.SafeConfigParser() c.read(os.path.expanduser('~/.aws/credentials')) section = sys.argv[1] cmd = sys.argv[2:] #if section != 'default': # section = 'profile %s' % section os.environ['AWS_ACCESS_KEY_ID'] = c.get(section, 'aws_access_key_id') os.environ['AWS_SECRET_ACCESS_KEY'] = c.get(section, 'aws_secret_access_key') print os.environ['AWS_ACCESS_KEY_ID'] subprocess.call(' '.join(cmd), shell=True)
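Background on why this record both changes the file name and comments out the section-prefix logic: the AWS CLI writes plain section headers to ~/.aws/credentials (e.g. [dev]), whereas non-default sections in ~/.aws/config are headed [profile dev] — so once the script reads the credentials file, the 'profile %s' rewrite becomes wrong. A minimal reader in the script's own Python 2 idiom; the 'dev' profile name is illustrative.

import ConfigParser
import os

def read_credentials(profile):
    # ~/.aws/credentials uses bare section names, unlike ~/.aws/config,
    # which prefixes non-default profiles with 'profile '.
    parser = ConfigParser.SafeConfigParser()
    parser.read(os.path.expanduser('~/.aws/credentials'))
    return (parser.get(profile, 'aws_access_key_id'),
            parser.get(profile, 'aws_secret_access_key'))

key_id, secret = read_credentials('dev')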
fb21faaec025a0a6ca2d98c8b2381902f3b1444a
pybug/align/lucaskanade/__init__.py
pybug/align/lucaskanade/__init__.py
import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation)
import appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
Add GaborFourier to default import
Add GaborFourier to default import
Python
bsd-3-clause
menpo/menpo,yuxiang-zhou/menpo,grigorisg9gr/menpo,mozata/menpo,mozata/menpo,mozata/menpo,mozata/menpo,grigorisg9gr/menpo,menpo/menpo,menpo/menpo,jabooth/menpo-archive,jabooth/menpo-archive,jabooth/menpo-archive,yuxiang-zhou/menpo,grigorisg9gr/menpo,patricksnape/menpo,yuxiang-zhou/menpo,jabooth/menpo-archive,patricksnape/menpo,patricksnape/menpo
import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation) Add GaborFourier to default import
import appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
<commit_before>import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation) <commit_msg>Add GaborFourier to default import<commit_after>
import appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation) Add GaborFourier to default importimport appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
<commit_before>import appearance import image from residual import (LSIntensity, ECC, GradientImages, GradientCorrelation) <commit_msg>Add GaborFourier to default import<commit_after>import appearance import image from residual import (LSIntensity, ECC, GaborFourier, GradientImages, GradientCorrelation)
e275fb1406f0a8e70bb3a9d4a50a82400f7e2c29
signac/gui/__init__.py
signac/gui/__init__.py
"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa except ImportError: warnings.warn("Failed to import PySide. " "gui will not be available.", ImportWarning) def main(): """Start signac-gui. The gui is only available if PySide is installed.""" raise ImportError( "You need to install PySide to use the gui.") else: from .gui import main __all__ = ['main']
"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa import pymongo # noqa except ImportError as error: msg = "{}. The signac gui is not available.".format(error) warnings.warn(msg, ImportWarning) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" raise ImportError(msg) else: from .gui import main __all__ = ['main']
Remove hard dependency for pymongo.
Remove hard dependency for pymongo. Caused by pulling the gui package into the signac namespace. Fixes issue #24.
Python
bsd-3-clause
csadorf/signac,csadorf/signac
"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa except ImportError: warnings.warn("Failed to import PySide. " "gui will not be available.", ImportWarning) def main(): """Start signac-gui. The gui is only available if PySide is installed.""" raise ImportError( "You need to install PySide to use the gui.") else: from .gui import main __all__ = ['main'] Remove hard dependency for pymongo. Caused by pulling the gui package into the signac namespace. Fixes issue #24.
"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa import pymongo # noqa except ImportError as error: msg = "{}. The signac gui is not available.".format(error) warnings.warn(msg, ImportWarning) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" raise ImportError(msg) else: from .gui import main __all__ = ['main']
<commit_before>"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa except ImportError: warnings.warn("Failed to import PySide. " "gui will not be available.", ImportWarning) def main(): """Start signac-gui. The gui is only available if PySide is installed.""" raise ImportError( "You need to install PySide to use the gui.") else: from .gui import main __all__ = ['main'] <commit_msg>Remove hard dependency for pymongo. Caused by pulling the gui package into the signac namespace. Fixes issue #24.<commit_after>
"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa import pymongo # noqa except ImportError as error: msg = "{}. The signac gui is not available.".format(error) warnings.warn(msg, ImportWarning) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" raise ImportError(msg) else: from .gui import main __all__ = ['main']
"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa except ImportError: warnings.warn("Failed to import PySide. " "gui will not be available.", ImportWarning) def main(): """Start signac-gui. The gui is only available if PySide is installed.""" raise ImportError( "You need to install PySide to use the gui.") else: from .gui import main __all__ = ['main'] Remove hard dependency for pymongo. Caused by pulling the gui package into the signac namespace. Fixes issue #24."""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa import pymongo # noqa except ImportError as error: msg = "{}. The signac gui is not available.".format(error) warnings.warn(msg, ImportWarning) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" raise ImportError(msg) else: from .gui import main __all__ = ['main']
<commit_before>"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa except ImportError: warnings.warn("Failed to import PySide. " "gui will not be available.", ImportWarning) def main(): """Start signac-gui. The gui is only available if PySide is installed.""" raise ImportError( "You need to install PySide to use the gui.") else: from .gui import main __all__ = ['main'] <commit_msg>Remove hard dependency for pymongo. Caused by pulling the gui package into the signac namespace. Fixes issue #24.<commit_after>"""Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import warnings try: import PySide # noqa import pymongo # noqa except ImportError as error: msg = "{}. The signac gui is not available.".format(error) warnings.warn(msg, ImportWarning) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" raise ImportError(msg) else: from .gui import main __all__ = ['main']
4da5ebbad11a5c5cdbea307668657d843d6d1005
cotracker/checkouts/middleware.py
cotracker/checkouts/middleware.py
"""Checkouts application middleware""" import logging logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def process_request(self, request): """Organizes info from each request and saves it to a log.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' template = "%(user)s@%(ip)s: %(method)s %(path)s \"%(useragent)s\"" logger.info(template % context)
"""Checkouts application middleware""" import logging import time logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def collect_request_details(self, request): """Gathers information of interest from the request and returns a dictionary.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' return context def process_request(self, request): """Captures the current time and saves it to the request object.""" request._analytics_start_time = time.time() def process_response(self, request, response): """Organizes info from each request/response and saves it to a log.""" context = self.collect_request_details(request) context['status'] = response.status_code if not request._analytics_start_time: logger.error("Unable to provide timing data for request") context['elapsed'] = -1.0 else: elapsed = (time.time() - request._analytics_start_time) * 1000.0 context['elapsed'] = elapsed template = "%(user)s@%(ip)s: %(method)s %(path)s %(elapsed)fms %(status)s \"%(useragent)s\"" logger.info(template % context) return response
Enhance analytics with timing and status code info
Enhance analytics with timing and status code info
Python
mit
eallrich/checkniner,eallrich/checkniner,eallrich/checkniner
"""Checkouts application middleware""" import logging logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def process_request(self, request): """Organizes info from each request and saves it to a log.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' template = "%(user)s@%(ip)s: %(method)s %(path)s \"%(useragent)s\"" logger.info(template % context) Enhance analytics with timing and status code info
"""Checkouts application middleware""" import logging import time logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def collect_request_details(self, request): """Gathers information of interest from the request and returns a dictionary.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' return context def process_request(self, request): """Captures the current time and saves it to the request object.""" request._analytics_start_time = time.time() def process_response(self, request, response): """Organizes info from each request/response and saves it to a log.""" context = self.collect_request_details(request) context['status'] = response.status_code if not request._analytics_start_time: logger.error("Unable to provide timing data for request") context['elapsed'] = -1.0 else: elapsed = (time.time() - request._analytics_start_time) * 1000.0 context['elapsed'] = elapsed template = "%(user)s@%(ip)s: %(method)s %(path)s %(elapsed)fms %(status)s \"%(useragent)s\"" logger.info(template % context) return response
<commit_before>"""Checkouts application middleware""" import logging logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def process_request(self, request): """Organizes info from each request and saves it to a log.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' template = "%(user)s@%(ip)s: %(method)s %(path)s \"%(useragent)s\"" logger.info(template % context) <commit_msg>Enhance analytics with timing and status code info<commit_after>
"""Checkouts application middleware""" import logging import time logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def collect_request_details(self, request): """Gathers information of interest from the request and returns a dictionary.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' return context def process_request(self, request): """Captures the current time and saves it to the request object.""" request._analytics_start_time = time.time() def process_response(self, request, response): """Organizes info from each request/response and saves it to a log.""" context = self.collect_request_details(request) context['status'] = response.status_code if not request._analytics_start_time: logger.error("Unable to provide timing data for request") context['elapsed'] = -1.0 else: elapsed = (time.time() - request._analytics_start_time) * 1000.0 context['elapsed'] = elapsed template = "%(user)s@%(ip)s: %(method)s %(path)s %(elapsed)fms %(status)s \"%(useragent)s\"" logger.info(template % context) return response
"""Checkouts application middleware""" import logging logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def process_request(self, request): """Organizes info from each request and saves it to a log.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' template = "%(user)s@%(ip)s: %(method)s %(path)s \"%(useragent)s\"" logger.info(template % context) Enhance analytics with timing and status code info"""Checkouts application middleware""" import logging import time logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def collect_request_details(self, request): """Gathers information of interest from the request and returns a dictionary.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' return context def process_request(self, request): """Captures the current time and saves it to the request object.""" request._analytics_start_time = time.time() def process_response(self, request, response): """Organizes info from each request/response and saves it to a log.""" context = self.collect_request_details(request) context['status'] = response.status_code if not request._analytics_start_time: logger.error("Unable to provide timing data for request") context['elapsed'] = -1.0 else: elapsed = (time.time() - request._analytics_start_time) * 1000.0 context['elapsed'] = elapsed template = "%(user)s@%(ip)s: %(method)s %(path)s %(elapsed)fms %(status)s \"%(useragent)s\"" logger.info(template % context) return response
<commit_before>"""Checkouts application middleware""" import logging logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def process_request(self, request): """Organizes info from each request and saves it to a log.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' template = "%(user)s@%(ip)s: %(method)s %(path)s \"%(useragent)s\"" logger.info(template % context) <commit_msg>Enhance analytics with timing and status code info<commit_after>"""Checkouts application middleware""" import logging import time logger = logging.getLogger('analytics') class Analytics(): """Tracks request details useful for analysis of usage patterns. To ensure that the name of the logged in user can be accessed, this middleware should come after Django's built-in AuthenticationMiddleware in the project settings. """ def collect_request_details(self, request): """Gathers information of interest from the request and returns a dictionary.""" context = { 'ip': request.META['REMOTE_ADDR'], 'method': request.method, 'path': request.path, 'user': request.user.username, 'useragent': request.META['HTTP_USER_AGENT'], } # Fall-back if the user is not recognized if not request.user.is_authenticated(): context['user'] = 'anonymous' return context def process_request(self, request): """Captures the current time and saves it to the request object.""" request._analytics_start_time = time.time() def process_response(self, request, response): """Organizes info from each request/response and saves it to a log.""" context = self.collect_request_details(request) context['status'] = response.status_code if not request._analytics_start_time: logger.error("Unable to provide timing data for request") context['elapsed'] = -1.0 else: elapsed = (time.time() - request._analytics_start_time) * 1000.0 context['elapsed'] = elapsed template = "%(user)s@%(ip)s: %(method)s %(path)s %(elapsed)fms %(status)s \"%(useragent)s\"" logger.info(template % context) return response
f566e0e36269ea2cd1e82c6af712097917effd4a
dlrn/migrations/versions/2d503b5034b7_rename_artifacts.py
dlrn/migrations/versions/2d503b5034b7_rename_artifacts.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', new_column_name='rpms')
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', existing_type=sa.Text(), new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', existing_type=sa.Text(), new_column_name='rpms')
Fix alembic migration for rpms->artifacts rename
Fix alembic migration for rpms->artifacts rename The migration does not work on MySQL-based engines, because it requires setting the existing_type parameter [1]. It worked fine on SQLite, though. [1] - https://alembic.sqlalchemy.org/en/latest/ops.html#alembic.operations.Operations.alter_column Change-Id: If0cc05af843e3db5f4b2e501caa8f4f773b24509
Python
apache-2.0
openstack-packages/delorean,openstack-packages/delorean,openstack-packages/DLRN,openstack-packages/DLRN
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', new_column_name='rpms') Fix alembic migration for rpms->artifacts rename The migration does not work on MySQL-based engines, because it requires setting the existing_type parameter [1]. It worked fine on SQLite, though. [1] - https://alembic.sqlalchemy.org/en/latest/ops.html#alembic.operations.Operations.alter_column Change-Id: If0cc05af843e3db5f4b2e501caa8f4f773b24509
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', existing_type=sa.Text(), new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', existing_type=sa.Text(), new_column_name='rpms')
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', new_column_name='rpms') <commit_msg>Fix alembic migration for rpms->artifacts rename The migration does not work on MySQL-based engines, because it requires setting the existing_type parameter [1]. It worked fine on SQLite, though. [1] - https://alembic.sqlalchemy.org/en/latest/ops.html#alembic.operations.Operations.alter_column Change-Id: If0cc05af843e3db5f4b2e501caa8f4f773b24509<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', existing_type=sa.Text(), new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', existing_type=sa.Text(), new_column_name='rpms')
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', new_column_name='rpms') Fix alembic migration for rpms->artifacts rename The migration does not work on MySQL-based engines, because it requires setting the existing_type parameter [1]. It worked fine on SQLite, though. [1] - https://alembic.sqlalchemy.org/en/latest/ops.html#alembic.operations.Operations.alter_column Change-Id: If0cc05af843e3db5f4b2e501caa8f4f773b24509# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', existing_type=sa.Text(), new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', existing_type=sa.Text(), new_column_name='rpms')
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', new_column_name='rpms') <commit_msg>Fix alembic migration for rpms->artifacts rename The migration does not work on MySQL-based engines, because it requires setting the existing_type parameter [1]. It worked fine on SQLite, though. [1] - https://alembic.sqlalchemy.org/en/latest/ops.html#alembic.operations.Operations.alter_column Change-Id: If0cc05af843e3db5f4b2e501caa8f4f773b24509<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename rpms to artifacts Revision ID: 2d503b5034b7 Revises: 2a0313a8a7d6 Create Date: 2019-04-26 01:06:50.462042 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '2d503b5034b7' down_revision = '2a0313a8a7d6' branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('rpms', existing_type=sa.Text(), new_column_name='artifacts') def downgrade(): with op.batch_alter_table("commits") as batch_op: batch_op.alter_column('artifacts', existing_type=sa.Text(), new_column_name='rpms')
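Alembic operations can also be driven programmatically, which makes the fixed migration easy to exercise against an in-memory SQLite database. A sketch under that assumption (SQLite happens to tolerate a missing existing_type, which is why the original bug only surfaced on MySQL):

import sqlalchemy as sa
from alembic.migration import MigrationContext
from alembic.operations import Operations

engine = sa.create_engine('sqlite://')
with engine.begin() as conn:
    conn.execute(sa.text('CREATE TABLE commits (id INTEGER PRIMARY KEY, rpms TEXT)'))
    # Build an Operations facade directly on the connection; no env.py needed.
    op = Operations(MigrationContext.configure(conn))
    with op.batch_alter_table('commits') as batch_op:
        batch_op.alter_column('rpms', existing_type=sa.Text(),
                              new_column_name='artifacts')
    columns = [row[1] for row in conn.execute(sa.text('PRAGMA table_info(commits)'))]
    print(columns)  # expected: ['id', 'artifacts']

On MySQL the same alter_column compiles to CHANGE COLUMN, which must restate the full column definition, hence the hard requirement for existing_type there.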
51b7fba10a85136877c8a918d4d24d5a431a2a7f
mzalendo/run_all_tests_with_coverage.py
mzalendo/run_all_tests_with_coverage.py
#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage
#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')/*" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage
Correct the --omit parameter for coverage.py
Correct the --omit parameter for coverage.py Despite what some things on the web suggest, you seem to need to have a wildcard at the end of a path in the --omit list.
Python
agpl-3.0
hzj123/56th,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,hzj123/56th,patricmutwiri/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola,hzj123/56th,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,patricmutwiri/pombola
#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage Correct the --omit parameter for coverage.py Despite what some things on the web suggest, you seem to need to have a wildcard at the end of a path in the --omit list.
#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')/*" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage
<commit_before>#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage <commit_msg>Correct the --omit parameter for coverage.py Despite what some things on the web suggest, you seem to need to have a wildcard at the end of a path in the --omit list.<commit_after>
#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')/*" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage
#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage Correct the --omit parameter for coverage.py Despite what some things on the web suggest, you seem to need to have a wildcard at the end of a path in the --omit list.#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')/*" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage
<commit_before>#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage <commit_msg>Correct the --omit parameter for coverage.py Despite what some things on the web suggest, you seem to need to have a wildcard at the end of a path in the --omit list.<commit_after>#!/bin/bash find . -name '*.pyc' -delete coverage erase OMIT="$(python -c 'import sys; print sys.prefix')/*" coverage run --omit=$OMIT ./manage.py test \ core \ feedback \ hansard \ helpers \ images \ info \ scorecards \ search \ tasks \ user_profile coverage run --omit=$OMIT ./manage.py test --selenium-only \ core \ feedback \ user_profile coverage html -d mzalendo-coverage
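Although this record's file carries a .py name, its payload is a shell wrapper around the coverage CLI. The same omit rule applies to coverage.py's Python API, where omit entries are likewise glob patterns; a small sketch, assuming the coverage package is installed:

import sys
import coverage

def work():
    return sum(range(10))  # stand-in for the real test run

# A bare sys.prefix matches nothing; the trailing wildcard turns it into a
# pattern that excludes everything under the interpreter prefix.
cov = coverage.Coverage(omit=[sys.prefix + '/*'])
cov.start()
work()
cov.stop()
cov.report()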
fbd37fe6404bfc1e7cec4b2137c19e7323cdde02
street_score/project/urls.py
street_score/project/urls.py
from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') )
from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') )
Correct the url for the rating instance resource
Correct the url for the rating instance resource
Python
mit
openplans/streetscore,openplans/streetscore,openplans/streetscore
from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') ) Correct the url for the rating instance resource
from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') )
<commit_before>from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') ) <commit_msg>Correct the url for the rating instance resource<commit_after>
from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') )
from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') ) Correct the url for the rating instance resourcefrom django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') )
<commit_before>from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') ) <commit_msg>Correct the url for the rating instance resource<commit_after>from django.conf.urls import patterns, include, url from django.views import generic as views from . import resources # Uncomment the next two lines to enable the admin: from django.contrib.gis import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'project.views.home', name='home'), # url(r'^project/', include('project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), url(r'^$', views.TemplateView.as_view(template_name='index.html'), name='home'), url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'), url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'), url(r'^survey_session', resources.SurveySessionView.as_view(), name='survey_session_instance') )
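The one-character fix is easy to verify with the re module directly: without the leading question mark, (P<id>...) is an ordinary group that begins with the literal text P<id>, so a numeric id never matches. A quick check:

import re

broken = re.compile(r'^ratings/(P<id>\d+)$')
fixed = re.compile(r'^ratings/(?P<id>\d+)$')

print(broken.match('ratings/42'))             # None
print(fixed.match('ratings/42').groupdict())  # {'id': '42'}

Django's regex-based url() resolver uses the same named-group syntax, so the groupdict key is what gets passed to the view as a keyword argument.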
141e8303fe8f1d6fe554770d7480ef50797d4735
books/forms.py
books/forms.py
from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dividers in ISBN number) isbn = forms.CharField(max_length=20, label=_("ISBN")) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'isbn': _("ISBN"), 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'isbn': forms.TextInput(attrs={'required': 'required'}), 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data
from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dashes in ISBN) isbn = forms.CharField(max_length=20, label=_("ISBN"), widget=forms.TextInput( attrs={'required': 'required', 'pattern': '[0-9-]+', 'title': 'ISBN number'})) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data
Improve ISBN field in Book Type form
Improve ISBN field in Book Type form - Add required, pattern (to allow only numbers and dashes in HTML5-supporting browsers) and title properties - Remove a bit of redundant code
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dividers in ISBN number) isbn = forms.CharField(max_length=20, label=_("ISBN")) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'isbn': _("ISBN"), 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'isbn': forms.TextInput(attrs={'required': 'required'}), 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return dataImprove ISBN field in Book Type form - Add required, pattern (to allow only numbers and dashes in HTML5-supporting browsers) and title properties - Remove a bit of redundant code
from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dashes in ISBN) isbn = forms.CharField(max_length=20, label=_("ISBN"), widget=forms.TextInput( attrs={'required': 'required', 'pattern': '[0-9-]+', 'title': 'ISBN number'})) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data
<commit_before>from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dividers in ISBN number) isbn = forms.CharField(max_length=20, label=_("ISBN")) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'isbn': _("ISBN"), 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'isbn': forms.TextInput(attrs={'required': 'required'}), 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data<commit_msg>Improve ISBN field in Book Type form - Add required, pattern (to allow only numbers and dashes in HTML5-supporting browsers) and title properties - Remove a bit of redundant code<commit_after>
from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dashes in ISBN) isbn = forms.CharField(max_length=20, label=_("ISBN"), widget=forms.TextInput( attrs={'required': 'required', 'pattern': '[0-9-]+', 'title': 'ISBN number'})) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data
from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dividers in ISBN number) isbn = forms.CharField(max_length=20, label=_("ISBN")) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'isbn': _("ISBN"), 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'isbn': forms.TextInput(attrs={'required': 'required'}), 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return dataImprove ISBN field in Book Type form - Add required, pattern (to allow only numbers and dashes in HTML5-supporting browsers) and title properties - Remove a bit of redundant codefrom django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dashes in ISBN) isbn = forms.CharField(max_length=20, label=_("ISBN"), widget=forms.TextInput( attrs={'required': 'required', 'pattern': '[0-9-]+', 'title': 'ISBN number'})) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data
<commit_before>from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dividers in ISBN number) isbn = forms.CharField(max_length=20, label=_("ISBN")) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'isbn': _("ISBN"), 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'isbn': forms.TextInput(attrs={'required': 'required'}), 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data<commit_msg>Improve ISBN field in Book Type form - Add required, pattern (to allow only numbers and dashes in HTML5-supporting browsers) and title properties - Remove a bit of redundant code<commit_after>from django import forms from django.forms import ModelForm from django.utils.translation import ugettext_lazy as _ from books.models import BookType from egielda import settings class BookForm(ModelForm): # Different max_length than in model (to allow dashes in ISBN) isbn = forms.CharField(max_length=20, label=_("ISBN"), widget=forms.TextInput( attrs={'required': 'required', 'pattern': '[0-9-]+', 'title': 'ISBN number'})) class Meta: model = BookType fields = ['isbn', 'publisher', 'title', 'publication_year', 'price'] labels = { 'publisher': _("Publisher"), 'title': _("Title"), 'publication_year': _("Publication year"), 'price': _("Price (%s)") % getattr(settings, 'CURRENCY', 'USD'), } widgets = { 'publisher': forms.TextInput(attrs={'required': 'required'}), 'title': forms.TextInput(attrs={'required': 'required'}), 'publication_year': forms.NumberInput(attrs={'required': 'required', 'min': '1900', 'max': '2100'}), 'price': forms.NumberInput(attrs={'required': 'required', 'max': '999.99'}), } def clean_isbn(self): data = self.cleaned_data['isbn'] data = ''.join(filter(lambda x: x.isdigit(), data)) return data
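The pattern attribute only gates input client-side in HTML5-capable browsers; the server-side clean_isbn still strips everything but digits. Both halves can be mimicked with the standard library (the sample ISBNs below are made up):

import re

ISBN_PATTERN = re.compile(r'^[0-9-]+$')  # same character class as the form's pattern attr

def clean_isbn(raw):
    # Mirrors the form's clean_isbn: keep digits, drop dashes.
    return ''.join(ch for ch in raw if ch.isdigit())

for value in ('978-83-01-00000-1', '9788301000001', '978/bad'):
    if ISBN_PATTERN.match(value):
        print(value, '->', clean_isbn(value))
    else:
        print(value, '-> rejected by the pattern')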
779c01d3932c02f2b9c45e300c7efb54f81749e9
tests/rietveld/test_event_handler.py
tests/rietveld/test_event_handler.py
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
Add exception to test title
Add exception to test title
Python
mit
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover Add exception to test title
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
<commit_before>from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover <commit_msg>Add exception to test title<commit_after>
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover Add exception to test titlefrom __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
<commit_before>from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover <commit_msg>Add exception to test title<commit_after>from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
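The renamed test leans on unittest's callable form of assertRaises, where the function and its arguments are passed separately so the call happens inside the assertion. A self-contained illustration with a stub in place of event_handler.evt_change_gss_mode:

import unittest

def evt_change_gss_mode_stub(_event):
    # Stand-in for the real handler, which is expected to raise.
    raise NotImplementedError

class StubHandlerTests(unittest.TestCase):
    def test_callable_form(self):
        # Same idiom as the record: callable first, its arguments after.
        self.assertRaises(NotImplementedError, evt_change_gss_mode_stub, None)

    def test_context_manager_form(self):
        # Equivalent spelling that many suites prefer:
        with self.assertRaises(NotImplementedError):
            evt_change_gss_mode_stub(None)

if __name__ == '__main__':
    unittest.main()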
8afb48e23b91efa3432ffad568002a46384eb021
fantasyland.py
fantasyland.py
import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))
import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
Add command line interface to vary num-games and num-cards.
Add command line interface to vary num-games and num-cards.
Python
mit
session-id/pineapple-ai
import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))Add command line interface to vary num-games and num-cards.
import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
<commit_before>import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))<commit_msg>Add command line interface to vary num-games and num-cards.<commit_after>
import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))Add command line interface to vary num-games and num-cards.import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
<commit_before>import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))<commit_msg>Add command line interface to vary num-games and num-cards.<commit_after>import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
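Two details of the argparse change are worth calling out: dashed option names become underscored attributes on the namespace, and the defaults kick in when no flags are given. A sketch that exercises both without touching the game module (the 52-integer deck is a stand-in for game.cards):

import argparse
import random

parser = argparse.ArgumentParser(description='Illustrative re-creation of the CLI.')
parser.add_argument('--num-games', type=int, default=1000,
                    help='number of games to play')
parser.add_argument('--num-cards', type=int, default=14,
                    help='number of cards to be dealt')
args = parser.parse_args([])  # empty argv -> defaults, so this runs anywhere

# '--num-games' is exposed as args.num_games:
print(args.num_games, args.num_cards)

cards = list(range(52))  # stand-in for game.cards
draw = random.sample(cards, args.num_cards)
print(len(draw))  # 14

The closing print in the record reports the mean plus or minus np.std(utilities)/sqrt(N), i.e. the standard error of the mean over the simulated games.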
5f67934da00ff36044e9fd620b690e36968570c0
salt/output/overstatestage.py
salt/output/overstatestage.py
''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}:{2}\n'.format(colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}:{2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr
''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}: {2}\n'.format( colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}: {2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr
Make stage outputter a little cleaner
Make stage outputter a little cleaner
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}:{2}\n'.format(colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}:{2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr Make stage outputter a little cleaner
''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}: {2}\n'.format( colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}: {2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr
<commit_before>''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}:{2}\n'.format(colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}:{2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr <commit_msg>Make stage outputter a little cleaner<commit_after>
''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}: {2}\n'.format( colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}: {2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr
''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}:{2}\n'.format(colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}:{2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr Make stage outputter a little cleaner''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}: {2}\n'.format( colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}: {2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr
<commit_before>''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}:{2}\n'.format(colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}:{2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr <commit_msg>Make stage outputter a little cleaner<commit_after>''' Display clean output of an overstate stage ''' #[{'group2': {'match': ['fedora17-2', 'fedora17-3'], # 'require': ['group1'], # 'sls': ['nginx', 'edit']} # } # ] # Import Salt libs import salt.utils def output(data): ''' Format the data for printing stage information from the overstate system ''' colors = salt.utils.get_colors(__opts__.get('color')) ostr = '' for comp in data: for name, stage in comp.items(): ostr += '{0}{1}: {2}\n'.format( colors['LIGHT_BLUE'], name, colors['ENDC']) for key in sorted(stage): ostr += ' {0}{1}: {2}{3}\n'.format( colors['LIGHT_BLUE'], key, stage[key], colors['ENDC']) return ostr
4b8de0203d2eec87c2d05c3521df8af3365f73a4
IPython/testing/nose_assert_methods.py
IPython/testing/nose_assert_methods.py
"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in
"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in def assert_not_in(item, collection): assert item not in collection, '%r in %r' % (item, collection) if not hasattr(nt, 'assert_not_in'): nt.assert_not_in = assert_not_in
Add assert_not_in method for Python2.6
Add assert_not_in method for Python2.6
Python
bsd-3-clause
ipython/ipython,ipython/ipython
"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in Add assert_not_in method for Python2.6
"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in def assert_not_in(item, collection): assert item not in collection, '%r in %r' % (item, collection) if not hasattr(nt, 'assert_not_in'): nt.assert_not_in = assert_not_in
<commit_before>"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in <commit_msg>Add assert_not_in method for Python2.6<commit_after>
"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in def assert_not_in(item, collection): assert item not in collection, '%r in %r' % (item, collection) if not hasattr(nt, 'assert_not_in'): nt.assert_not_in = assert_not_in
"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in Add assert_not_in method for Python2.6"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in def assert_not_in(item, collection): assert item not in collection, '%r in %r' % (item, collection) if not hasattr(nt, 'assert_not_in'): nt.assert_not_in = assert_not_in
<commit_before>"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in <commit_msg>Add assert_not_in method for Python2.6<commit_after>"""Add some assert methods to nose.tools. These were added in Python 2.7/3.1, so once we stop testing on Python 2.6, this file can be removed. """ import nose.tools as nt def assert_in(item, collection): assert item in collection, '%r not in %r' % (item, collection) if not hasattr(nt, 'assert_in'): nt.assert_in = assert_in def assert_not_in(item, collection): assert item not in collection, '%r in %r' % (item, collection) if not hasattr(nt, 'assert_not_in'): nt.assert_not_in = assert_not_in
6ad4a0d511f874ccb94a6c8b02f0d4f5e99947ee
bigbuild/management/commands/cachepages.py
bigbuild/management/commands/cachepages.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with open(archive_cache_path, 'w') as f: json.dump( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), f, default=serializer )
#!/usr/bin/env python # -*- coding: utf-8 -*- import io import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with io.open(archive_cache_path, 'w', encoding='utf8') as f: data = json.dumps( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), default=serializer, ensure_ascii=False ) f.write(unicode(data))
Write out page cache as unicode utf8
Write out page cache as unicode utf8
Python
mit
datadesk/django-bigbuild,datadesk/django-bigbuild,datadesk/django-bigbuild
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with open(archive_cache_path, 'w') as f: json.dump( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), f, default=serializer ) Write out page cache as unicode utf8
#!/usr/bin/env python # -*- coding: utf-8 -*- import io import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with io.open(archive_cache_path, 'w', encoding='utf8') as f: data = json.dumps( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), default=serializer, ensure_ascii=False ) f.write(unicode(data))
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with open(archive_cache_path, 'w') as f: json.dump( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), f, default=serializer ) <commit_msg>Write out page cache as unicode utf8<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import io import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with io.open(archive_cache_path, 'w', encoding='utf8') as f: data = json.dumps( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), default=serializer, ensure_ascii=False ) f.write(unicode(data))
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with open(archive_cache_path, 'w') as f: json.dump( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), f, default=serializer ) Write out page cache as unicode utf8#!/usr/bin/env python # -*- coding: utf-8 -*- import io import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with io.open(archive_cache_path, 'w', encoding='utf8') as f: data = json.dumps( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), default=serializer, ensure_ascii=False ) f.write(unicode(data))
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with open(archive_cache_path, 'w') as f: json.dump( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), f, default=serializer ) <commit_msg>Write out page cache as unicode utf8<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import io import os import json from datetime import datetime from bigbuild.models import PageList from bigbuild import get_archive_directory from django.core.management.base import BaseCommand def serializer(obj): """ JSON serializer for objects not serializable by default json code """ if isinstance(obj, datetime): serial = obj.isoformat() return serial raise TypeError("Type not serializable") class Command(BaseCommand): help = "Cache page metadata to increase the application speed" def handle(self, *args, **options): # Set the cache path for archived pages archive_cache_path = os.path.join(get_archive_directory(), '.cache') # Delete it if it already exists if os.path.exists(archive_cache_path): os.remove(archive_cache_path) # Pull the live PageList from the YAML files page_list = PageList() # Save the archived pages out to a new cache with io.open(archive_cache_path, 'w', encoding='utf8') as f: data = json.dumps( dict(archived_pages=[p.to_json() for p in page_list.archived_pages]), default=serializer, ensure_ascii=False ) f.write(unicode(data))
751819ea58389eaa1baf3de243459be4948b15f1
rpc_client/rpc_client_tests.py
rpc_client/rpc_client_tests.py
import unittest class Test(unittest.TestCase): def setUp(self): self.seq = range(10) #def test_shuffle(self): # # make sure the shuffled sequence does not lose any elements # random.shuffle(self.seq) # self.seq.sort() # self.assertEqual(self.seq, range(10)) #def test_choice(self): # element = random.choice(self.seq) # self.assertTrue(element in self.seq) #def test_sample(self): # self.assertRaises(ValueError, random.sample, self.seq, 20) # for element in random.sample(self.seq, 5): # self.assertTrue(element in self.seq) if __name__ == '__main__': unittest.main()
import unittest, xmlrpclib, couchdb from ConfigParser import SafeConfigParser class Test(unittest.TestCase): def setUp(self): self.cfg = SafeConfigParser() self.cfg.read(('rpc_client.ini', '../stats/basicStats.ini')) host = self.cfg.get('connection', 'server') port = self.cfg.getint('connection', 'port') connString = 'http://%s:%i' % (host, port) self.sp = xmlrpclib.ServerProxy(connString) self.projName = 'test_project_abracadabra' def test_projectExists(self): self.assertFalse( self.sp.projectExists(self.projName) ) def test_createAndDeleteProject(self): self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) self.assertTrue( self.sp.projectExists(self.projName) ) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) def test_createViews(self): if self.sp.projectExists(self.projName): self.sp.createProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) server = couchdb.Server(self.cfg.get('couchdb', 'dbaddress')) db = server[self.projName] self.assertTrue('_design/manage' in db) self.assertTrue('_design/basicStats' in db) self.assertTrue('jobs' in db['_design/manage']['views']) self.assertTrue('addCar' in db['_design/basicStats']['views']) self.assertTrue('deleteCar' in db['_design/basicStats']['views']) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) if __name__ == '__main__': unittest.main()
Make some simple unit test for RPC server.
Make some simple unit test for RPC server.
Python
apache-2.0
anthony-kolesov/kts46,anthony-kolesov/kts46,anthony-kolesov/kts46,anthony-kolesov/kts46
import unittest class Test(unittest.TestCase): def setUp(self): self.seq = range(10) #def test_shuffle(self): # # make sure the shuffled sequence does not lose any elements # random.shuffle(self.seq) # self.seq.sort() # self.assertEqual(self.seq, range(10)) #def test_choice(self): # element = random.choice(self.seq) # self.assertTrue(element in self.seq) #def test_sample(self): # self.assertRaises(ValueError, random.sample, self.seq, 20) # for element in random.sample(self.seq, 5): # self.assertTrue(element in self.seq) if __name__ == '__main__': unittest.main() Make some simple unit test for RPC server.
import unittest, xmlrpclib, couchdb from ConfigParser import SafeConfigParser class Test(unittest.TestCase): def setUp(self): self.cfg = SafeConfigParser() self.cfg.read(('rpc_client.ini', '../stats/basicStats.ini')) host = self.cfg.get('connection', 'server') port = self.cfg.getint('connection', 'port') connString = 'http://%s:%i' % (host, port) self.sp = xmlrpclib.ServerProxy(connString) self.projName = 'test_project_abracadabra' def test_projectExists(self): self.assertFalse( self.sp.projectExists(self.projName) ) def test_createAndDeleteProject(self): self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) self.assertTrue( self.sp.projectExists(self.projName) ) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) def test_createViews(self): if self.sp.projectExists(self.projName): self.sp.createProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) server = couchdb.Server(self.cfg.get('couchdb', 'dbaddress')) db = server[self.projName] self.assertTrue('_design/manage' in db) self.assertTrue('_design/basicStats' in db) self.assertTrue('jobs' in db['_design/manage']['views']) self.assertTrue('addCar' in db['_design/basicStats']['views']) self.assertTrue('deleteCar' in db['_design/basicStats']['views']) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) if __name__ == '__main__': unittest.main()
<commit_before>import unittest class Test(unittest.TestCase): def setUp(self): self.seq = range(10) #def test_shuffle(self): # # make sure the shuffled sequence does not lose any elements # random.shuffle(self.seq) # self.seq.sort() # self.assertEqual(self.seq, range(10)) #def test_choice(self): # element = random.choice(self.seq) # self.assertTrue(element in self.seq) #def test_sample(self): # self.assertRaises(ValueError, random.sample, self.seq, 20) # for element in random.sample(self.seq, 5): # self.assertTrue(element in self.seq) if __name__ == '__main__': unittest.main() <commit_msg>Make some simple unit test for RPC server.<commit_after>
import unittest, xmlrpclib, couchdb from ConfigParser import SafeConfigParser class Test(unittest.TestCase): def setUp(self): self.cfg = SafeConfigParser() self.cfg.read(('rpc_client.ini', '../stats/basicStats.ini')) host = self.cfg.get('connection', 'server') port = self.cfg.getint('connection', 'port') connString = 'http://%s:%i' % (host, port) self.sp = xmlrpclib.ServerProxy(connString) self.projName = 'test_project_abracadabra' def test_projectExists(self): self.assertFalse( self.sp.projectExists(self.projName) ) def test_createAndDeleteProject(self): self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) self.assertTrue( self.sp.projectExists(self.projName) ) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) def test_createViews(self): if self.sp.projectExists(self.projName): self.sp.createProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) server = couchdb.Server(self.cfg.get('couchdb', 'dbaddress')) db = server[self.projName] self.assertTrue('_design/manage' in db) self.assertTrue('_design/basicStats' in db) self.assertTrue('jobs' in db['_design/manage']['views']) self.assertTrue('addCar' in db['_design/basicStats']['views']) self.assertTrue('deleteCar' in db['_design/basicStats']['views']) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) if __name__ == '__main__': unittest.main()
import unittest class Test(unittest.TestCase): def setUp(self): self.seq = range(10) #def test_shuffle(self): # # make sure the shuffled sequence does not lose any elements # random.shuffle(self.seq) # self.seq.sort() # self.assertEqual(self.seq, range(10)) #def test_choice(self): # element = random.choice(self.seq) # self.assertTrue(element in self.seq) #def test_sample(self): # self.assertRaises(ValueError, random.sample, self.seq, 20) # for element in random.sample(self.seq, 5): # self.assertTrue(element in self.seq) if __name__ == '__main__': unittest.main() Make some simple unit test for RPC server.import unittest, xmlrpclib, couchdb from ConfigParser import SafeConfigParser class Test(unittest.TestCase): def setUp(self): self.cfg = SafeConfigParser() self.cfg.read(('rpc_client.ini', '../stats/basicStats.ini')) host = self.cfg.get('connection', 'server') port = self.cfg.getint('connection', 'port') connString = 'http://%s:%i' % (host, port) self.sp = xmlrpclib.ServerProxy(connString) self.projName = 'test_project_abracadabra' def test_projectExists(self): self.assertFalse( self.sp.projectExists(self.projName) ) def test_createAndDeleteProject(self): self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) self.assertTrue( self.sp.projectExists(self.projName) ) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) def test_createViews(self): if self.sp.projectExists(self.projName): self.sp.createProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) server = couchdb.Server(self.cfg.get('couchdb', 'dbaddress')) db = server[self.projName] self.assertTrue('_design/manage' in db) self.assertTrue('_design/basicStats' in db) self.assertTrue('jobs' in db['_design/manage']['views']) self.assertTrue('addCar' in db['_design/basicStats']['views']) self.assertTrue('deleteCar' in db['_design/basicStats']['views']) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) if __name__ == '__main__': unittest.main()
<commit_before>import unittest class Test(unittest.TestCase): def setUp(self): self.seq = range(10) #def test_shuffle(self): # # make sure the shuffled sequence does not lose any elements # random.shuffle(self.seq) # self.seq.sort() # self.assertEqual(self.seq, range(10)) #def test_choice(self): # element = random.choice(self.seq) # self.assertTrue(element in self.seq) #def test_sample(self): # self.assertRaises(ValueError, random.sample, self.seq, 20) # for element in random.sample(self.seq, 5): # self.assertTrue(element in self.seq) if __name__ == '__main__': unittest.main() <commit_msg>Make some simple unit test for RPC server.<commit_after>import unittest, xmlrpclib, couchdb from ConfigParser import SafeConfigParser class Test(unittest.TestCase): def setUp(self): self.cfg = SafeConfigParser() self.cfg.read(('rpc_client.ini', '../stats/basicStats.ini')) host = self.cfg.get('connection', 'server') port = self.cfg.getint('connection', 'port') connString = 'http://%s:%i' % (host, port) self.sp = xmlrpclib.ServerProxy(connString) self.projName = 'test_project_abracadabra' def test_projectExists(self): self.assertFalse( self.sp.projectExists(self.projName) ) def test_createAndDeleteProject(self): self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) self.assertTrue( self.sp.projectExists(self.projName) ) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) def test_createViews(self): if self.sp.projectExists(self.projName): self.sp.createProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) self.sp.createProject(self.projName) server = couchdb.Server(self.cfg.get('couchdb', 'dbaddress')) db = server[self.projName] self.assertTrue('_design/manage' in db) self.assertTrue('_design/basicStats' in db) self.assertTrue('jobs' in db['_design/manage']['views']) self.assertTrue('addCar' in db['_design/basicStats']['views']) self.assertTrue('deleteCar' in db['_design/basicStats']['views']) self.sp.deleteProject(self.projName) self.assertFalse( self.sp.projectExists(self.projName) ) if __name__ == '__main__': unittest.main()
73609d84871973449e2e4520fdeac027131d0e6d
tests/list_pubs.py
tests/list_pubs.py
import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.04') for version in versions: print(version['name'])
import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.10') for version in versions: print(version['name'])
Fix ubuntu version in test prog
Fix ubuntu version in test prog
Python
mit
gbowerman/azurerm
import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.04') for version in versions: print(version['name']) Fix ubuntu version in test prog
import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.10') for version in versions: print(version['name'])
<commit_before>import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.04') for version in versions: print(version['name']) <commit_msg>Fix ubuntu version in test prog<commit_after>
import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.10') for version in versions: print(version['name'])
import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.04') for version in versions: print(version['name']) Fix ubuntu version in test progimport azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.10') for version in versions: print(version['name'])
<commit_before>import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.04') for version in versions: print(version['name']) <commit_msg>Fix ubuntu version in test prog<commit_after>import azurerm import json # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting vmssConfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) ''' pubs = azurerm.list_publishers(access_token, subscription_id, 'southeastasia') for pub in pubs: print(pub['name']) offers = azurerm.list_offers(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer') for offer in offers: print(offer['name']) skus = azurerm.list_skus(access_token, subscription_id, 'southeastasia', 'MicrosoftWindowsServer', 'WindowsServer') for sku in skus: print(sku['name']) ''' #versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'MicrosoftWindowsServer', 'WindowsServer', '2012-R2-Datacenter') versions = azurerm.list_sku_versions(access_token, subscription_id, 'eastus', 'Canonical', 'UbuntuServer', '15.10') for version in versions: print(version['name'])
377d0634a77c63ce9e3d937f31bdd82ebe695cbb
ev3dev/auto.py
ev3dev/auto.py
import platform # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import *
import platform import sys # ----------------------------------------------------------------------------- if sys.version_info < (3,4): raise SystemError('Must be using Python 3.4 or higher') # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' # ----------------------------------------------------------------------------- if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import *
Enforce the use of Python 3.4 or higher
Enforce the use of Python 3.4 or higher
Python
mit
rhempel/ev3dev-lang-python,dwalton76/ev3dev-lang-python,dwalton76/ev3dev-lang-python
import platform # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import * Enforce the use of Python 3.4 or higher
import platform import sys # ----------------------------------------------------------------------------- if sys.version_info < (3,4): raise SystemError('Must be using Python 3.4 or higher') # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' # ----------------------------------------------------------------------------- if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import *
<commit_before>import platform # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import * <commit_msg>Enforce the use of Python 3.4 or higher<commit_after>
import platform import sys # ----------------------------------------------------------------------------- if sys.version_info < (3,4): raise SystemError('Must be using Python 3.4 or higher') # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' # ----------------------------------------------------------------------------- if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import *
import platform # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import * Enforce the use of Python 3.4 or higherimport platform import sys # ----------------------------------------------------------------------------- if sys.version_info < (3,4): raise SystemError('Must be using Python 3.4 or higher') # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' # ----------------------------------------------------------------------------- if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import *
<commit_before>import platform # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import * <commit_msg>Enforce the use of Python 3.4 or higher<commit_after>import platform import sys # ----------------------------------------------------------------------------- if sys.version_info < (3,4): raise SystemError('Must be using Python 3.4 or higher') # ----------------------------------------------------------------------------- # Guess platform we are running on def current_platform(): machine = platform.machine() if machine == 'armv5tejl': return 'ev3' elif machine == 'armv6l': return 'brickpi' else: return 'unsupported' # ----------------------------------------------------------------------------- if current_platform() == 'brickpi': from .brickpi import * else: # Import ev3 by default, so that it is covered by documentation. from .ev3 import *
122507fb2c68a0d5734082bdc67ce7cf91293870
python/sierrapy/fastareader.py
python/sierrapy/fastareader.py
# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' if line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences
# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' elif line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences
Fix a bug which prepend the header to NA sequence
Fix a bug which prepend the header to NA sequence
Python
mit
hivdb/sierra-client
# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' if line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences Fix a bug which prepend the header to NA sequence
# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' elif line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences
<commit_before># -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' if line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences <commit_msg>Fix a bug which prepend the header to NA sequence<commit_after>
# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' elif line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences
# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' if line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences Fix a bug which prepend the header to NA sequence# -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' elif line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences
<commit_before># -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' if line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences <commit_msg>Fix a bug which prepend the header to NA sequence<commit_after># -*- coding: utf-8 -*- def load(fp): sequences = [] header = None curseq = '' for line in fp: if line.startswith('>'): if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) header = line[1:].strip() curseq = '' elif line.startswith('#'): continue else: curseq += line.strip() if header and curseq: sequences.append({ 'header': header, 'sequence': curseq }) return sequences
9856361b48bb481f7913eaf69be668225c5bb818
api/files/urls.py
api/files/urls.py
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
Remove the files list endpoint
Remove the files list endpoint
Python
apache-2.0
acshi/osf.io,aaxelb/osf.io,cslzchen/osf.io,felliott/osf.io,brandonPurvis/osf.io,wearpants/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,SSJohns/osf.io,amyshi188/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,erinspace/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,cosenal/osf.io,jnayak1/osf.io,billyhunt/osf.io,wearpants/osf.io,mfraezz/osf.io,adlius/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,mluo613/osf.io,zachjanicki/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,rdhyee/osf.io,mattclark/osf.io,GageGaskins/osf.io,caneruguz/osf.io,alexschiller/osf.io,chrisseto/osf.io,crcresearch/osf.io,abought/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,danielneis/osf.io,mfraezz/osf.io,cosenal/osf.io,jnayak1/osf.io,leb2dg/osf.io,danielneis/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,samchrisinger/osf.io,chennan47/osf.io,brianjgeiger/osf.io,wearpants/osf.io,erinspace/osf.io,mluo613/osf.io,danielneis/osf.io,KAsante95/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,billyhunt/osf.io,kwierman/osf.io,samchrisinger/osf.io,arpitar/osf.io,emetsger/osf.io,felliott/osf.io,samanehsan/osf.io,Nesiehr/osf.io,saradbowman/osf.io,adlius/osf.io,RomanZWang/osf.io,danielneis/osf.io,binoculars/osf.io,zamattiac/osf.io,TomBaxter/osf.io,petermalcolm/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,acshi/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,Ghalko/osf.io,amyshi188/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,SSJohns/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,rdhyee/osf.io,icereval/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,chennan47/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,petermalcolm/osf.io,saradbowman/osf.io,hmoco/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,arpitar/osf.io,doublebits/osf.io,acshi/osf.io,KAsante95/osf.io,KAsante95/osf.io,sloria/osf.io,emetsger/osf.io,kch8qx/osf.io,hmoco/osf.io,adlius/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,hmoco/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,mfraezz/osf.io,kch8qx/osf.io,SSJohns/osf.io,arpitar/osf.io,TomBaxter/osf.io,aaxelb/osf.io,mfraezz/osf.io,samanehsan/osf.io,caseyrygt/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,mluo613/osf.io,caneruguz/osf.io,billyhunt/osf.io,pattisdr/osf.io,RomanZWang/osf.io,aaxelb/osf.io,samanehsan/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,binoculars/osf.io,KAsante95/osf.io,arpitar/osf.io,alexschiller/osf.io,kch8qx/osf.io,mluo613/osf.io,njantrania/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,binoculars/osf.io,mattclark/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,kch8qx/osf.io,doublebits/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,GageGaskins/osf.io,billyhunt/osf.io,sloria/osf.io,asanfilippo7/osf.io,pattisdr/osf.io,doublebits/osf.io,mluke93/osf.io,sloria/osf.io,caseyrygt/osf.io,alexschiller/osf.io,rdhyee/osf.io,rdhyee/osf.io,Nesiehr/osf.io,mluke93/osf.io,caseyrygt/osf.io,icereval/osf.io,caneruguz/osf.io
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ] Remove the files list endpoint
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
<commit_before>from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ] <commit_msg>Remove the files list endpoint<commit_after>
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ] Remove the files list endpointfrom django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
<commit_before>from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^$', views.FileList.as_view(), name='file-list'), url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ] <commit_msg>Remove the files list endpoint<commit_after>from django.conf.urls import url from api.files import views urlpatterns = [ url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name='file-detail'), url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name='file-versions'), url(r'^(?P<file_id>\w+)/versions/(?P<version_id>\w+)/$', views.FileVersionDetail.as_view(), name='version-detail'), ]
648b128542f737da37dc696a02bd71ace6dbb28c
tests/test_deps.py
tests/test_deps.py
import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import sentinels projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import sentinels']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - sentinels', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import sentinels']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x
import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import pact projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import pact']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - pact', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import pact']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x
Change import name in deps test
Change import name in deps test
Python
bsd-3-clause
getweber/weber-cli
import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import sentinels projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import sentinels']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - sentinels', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import sentinels']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x Change import name in deps test
import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import pact projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import pact']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - pact', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import pact']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x
<commit_before>import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import sentinels projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import sentinels']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - sentinels', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import sentinels']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x <commit_msg>Change import name in deps test<commit_after>
import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import pact projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import pact']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - pact', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import pact']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x
import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import sentinels projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import sentinels']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - sentinels', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import sentinels']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x Change import name in deps testimport os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import pact projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import pact']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - pact', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import pact']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x
<commit_before>import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import sentinels projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import sentinels']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - sentinels', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import sentinels']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x <commit_msg>Change import name in deps test<commit_after>import os import subprocess import sys import pytest def test_adding_deps(tmpdir): with pytest.raises(ImportError): import pact projdir = tmpdir.join('proj') yaml = projdir.join('.cob-project.yml') python = str(projdir.join('.cob/env/bin/python')) with yaml.open('a', ensure=True) as f: print('name: testproj', file=f) _cob_on(projdir, 'bootstrap') assert os.path.exists(python) assert subprocess.call([python, '-c', 'import pact']) == 1 with yaml.open('a') as f: print('deps:', file=f) print(' - pact', file=f) _cob_on(projdir, 'bootstrap') assert subprocess.call([python, '-c', 'import pact']) == 0 def _cob_on(cwd, cmd): x = os.environ.pop('COB_NO_REENTRY') try: subprocess.check_call([sys.executable, '-m', 'cob.cli.main', '-vvvvv', str(cmd)], cwd=str(cwd)) finally: os.environ['COB_NO_REENTRY'] = x
48f643b99c93fc10c042fe10e4a06c64df245d0d
lobster/cmssw/actions.py
lobster/cmssw/actions.py
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots() self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
Allow users to add foremen plots for daemonized plotting.
Allow users to add foremen plots for daemonized plotting.
Python
mit
matz-e/lobster,matz-e/lobster,matz-e/lobster
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots() self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now Allow users to add foremen plots for daemonized plotting.
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
<commit_before>import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots() self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now <commit_msg>Allow users to add foremen plots for daemonized plotting.<commit_after>
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots() self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now Allow users to add foremen plots for daemonized plotting.import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
<commit_before>import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots() self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now <commit_msg>Allow users to add foremen plots for daemonized plotting.<commit_after>import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
2f2f6cba331515b8d563d0e2f7869111df4227c3
txlege84/core/management/commands/updateconveningtimes.py
txlege84/core/management/commands/updateconveningtimes.py
from django.core.management.base import BaseCommand from core.models import ConveneTime from legislators.models import Chamber import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, } ) self.stdout.write(u'Now set to: {}'.format(time_string))
from django.core.management.base import BaseCommand from django.utils import timezone from core.models import ConveneTime from legislators.models import Chamber from dateutil.parser import parse import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] status, time = time_string.split(' until ') time = timezone.make_aware( parse(time.replace(' noon', '')), timezone.get_default_timezone()) ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, 'status': status, 'time': time, } ) self.stdout.write(u'Now set to: {}'.format(time_string))
Update scraper to account for new fields
Update scraper to account for new fields
Python
mit
texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84
from django.core.management.base import BaseCommand from core.models import ConveneTime from legislators.models import Chamber import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, } ) self.stdout.write(u'Now set to: {}'.format(time_string)) Update scraper to account for new fields
from django.core.management.base import BaseCommand from django.utils import timezone from core.models import ConveneTime from legislators.models import Chamber from dateutil.parser import parse import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] status, time = time_string.split(' until ') time = timezone.make_aware( parse(time.replace(' noon', '')), timezone.get_default_timezone()) ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, 'status': status, 'time': time, } ) self.stdout.write(u'Now set to: {}'.format(time_string))
<commit_before>from django.core.management.base import BaseCommand from core.models import ConveneTime from legislators.models import Chamber import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, } ) self.stdout.write(u'Now set to: {}'.format(time_string)) <commit_msg>Update scraper to account for new fields<commit_after>
from django.core.management.base import BaseCommand from django.utils import timezone from core.models import ConveneTime from legislators.models import Chamber from dateutil.parser import parse import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] status, time = time_string.split(' until ') time = timezone.make_aware( parse(time.replace(' noon', '')), timezone.get_default_timezone()) ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, 'status': status, 'time': time, } ) self.stdout.write(u'Now set to: {}'.format(time_string))
from django.core.management.base import BaseCommand from core.models import ConveneTime from legislators.models import Chamber import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, } ) self.stdout.write(u'Now set to: {}'.format(time_string)) Update scraper to account for new fieldsfrom django.core.management.base import BaseCommand from django.utils import timezone from core.models import ConveneTime from legislators.models import Chamber from dateutil.parser import parse import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] status, time = time_string.split(' until ') time = timezone.make_aware( parse(time.replace(' noon', '')), timezone.get_default_timezone()) ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, 'status': status, 'time': time, } ) self.stdout.write(u'Now set to: {}'.format(time_string))
<commit_before>from django.core.management.base import BaseCommand from core.models import ConveneTime from legislators.models import Chamber import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, } ) self.stdout.write(u'Now set to: {}'.format(time_string)) <commit_msg>Update scraper to account for new fields<commit_after>from django.core.management.base import BaseCommand from django.utils import timezone from core.models import ConveneTime from legislators.models import Chamber from dateutil.parser import parse import requests class Command(BaseCommand): help = u'Scrape TLO for convening times.' def handle(self, *args, **kwargs): self.update_time('House') self.update_time('Senate') def update_time(self, chamber): self.stdout.write(u'Updating {} convening time...'.format(chamber)) page = requests.get('http://www.capitol.state.tx.us/tlodocs' '/SessionTime/{}SessTime.js'.format(chamber)) time_string = page.text.strip()[16:-3] status, time = time_string.split(' until ') time = timezone.make_aware( parse(time.replace(' noon', '')), timezone.get_default_timezone()) ConveneTime.objects.update_or_create( chamber=Chamber.objects.get(name='Texas {}'.format(chamber)), defaults={ 'time_string': time_string, 'status': status, 'time': time, } ) self.stdout.write(u'Now set to: {}'.format(time_string))
d9f10a5ac329522f06f41cb52ed113b0703b87a1
syncplay/__init__.py
syncplay/__init__.py
version = '1.3.0' milestone = 'Chami' release_number = '4' projectURL = 'http://syncplay.pl/'
version = '1.3.0' milestone = 'Chami' release_number = '5' projectURL = 'http://syncplay.pl/'
Increase release number to 5 (1.3.0 Beta 3a)
Increase release number to 5 (1.3.0 Beta 3a)
Python
apache-2.0
NeverDecaf/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay,alby128/syncplay,NeverDecaf/syncplay
version = '1.3.0' milestone = 'Chami' release_number = '4' projectURL = 'http://syncplay.pl/' Increase release number to 5 (1.3.0 Beta 3a)
version = '1.3.0' milestone = 'Chami' release_number = '5' projectURL = 'http://syncplay.pl/'
<commit_before>version = '1.3.0' milestone = 'Chami' release_number = '4' projectURL = 'http://syncplay.pl/' <commit_msg>Increase release number to 5 (1.3.0 Beta 3a)<commit_after>
version = '1.3.0' milestone = 'Chami' release_number = '5' projectURL = 'http://syncplay.pl/'
version = '1.3.0' milestone = 'Chami' release_number = '4' projectURL = 'http://syncplay.pl/' Increase release number to 5 (1.3.0 Beta 3a)version = '1.3.0' milestone = 'Chami' release_number = '5' projectURL = 'http://syncplay.pl/'
<commit_before>version = '1.3.0' milestone = 'Chami' release_number = '4' projectURL = 'http://syncplay.pl/' <commit_msg>Increase release number to 5 (1.3.0 Beta 3a)<commit_after>version = '1.3.0' milestone = 'Chami' release_number = '5' projectURL = 'http://syncplay.pl/'
205616b0a23143cdc5ceb6fb8333cf6074ce737b
kitchen/pycompat25/collections/__init__.py
kitchen/pycompat25/collections/__init__.py
try: from collections import defaultdict except ImportError: from _defaultdict import defaultdict __all__ = ('defaultdict',)
try: #:E0611: deafultdict doesn't exist in python-2.4 or less but that's why we # have it in a try: except:. So we can use our version if necessary. from collections import defaultdict except ImportError: from kitchen.pycompat25.collections._defaultdict import defaultdict __all__ = ('defaultdict',)
Fix pylint error in this module
Fix pylint error in this module
Python
lgpl-2.1
fedora-infra/kitchen,fedora-infra/kitchen
try: from collections import defaultdict except ImportError: from _defaultdict import defaultdict __all__ = ('defaultdict',) Fix pylint error in this module
try: #:E0611: deafultdict doesn't exist in python-2.4 or less but that's why we # have it in a try: except:. So we can use our version if necessary. from collections import defaultdict except ImportError: from kitchen.pycompat25.collections._defaultdict import defaultdict __all__ = ('defaultdict',)
<commit_before>try: from collections import defaultdict except ImportError: from _defaultdict import defaultdict __all__ = ('defaultdict',) <commit_msg>Fix pylint error in this module<commit_after>
try: #:E0611: deafultdict doesn't exist in python-2.4 or less but that's why we # have it in a try: except:. So we can use our version if necessary. from collections import defaultdict except ImportError: from kitchen.pycompat25.collections._defaultdict import defaultdict __all__ = ('defaultdict',)
try: from collections import defaultdict except ImportError: from _defaultdict import defaultdict __all__ = ('defaultdict',) Fix pylint error in this moduletry: #:E0611: deafultdict doesn't exist in python-2.4 or less but that's why we # have it in a try: except:. So we can use our version if necessary. from collections import defaultdict except ImportError: from kitchen.pycompat25.collections._defaultdict import defaultdict __all__ = ('defaultdict',)
<commit_before>try: from collections import defaultdict except ImportError: from _defaultdict import defaultdict __all__ = ('defaultdict',) <commit_msg>Fix pylint error in this module<commit_after>try: #:E0611: deafultdict doesn't exist in python-2.4 or less but that's why we # have it in a try: except:. So we can use our version if necessary. from collections import defaultdict except ImportError: from kitchen.pycompat25.collections._defaultdict import defaultdict __all__ = ('defaultdict',)
b0ae4cb386411ae8ae5fd27b19ddb415d0772cf3
democracy_club/apps/everyelection/forms.py
democracy_club/apps/everyelection/forms.py
from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs)
from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): if 'areas' in self.cleaned_data: for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs)
Check that at least one area has been checked
Check that at least one area has been checked
Python
bsd-3-clause
DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website
from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs) Check that at least one area has been checked
from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): if 'areas' in self.cleaned_data: for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs)
<commit_before>from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs) <commit_msg>Check that at least one area has been checked<commit_after>
from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): if 'areas' in self.cleaned_data: for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs)
from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs) Check that at least one area has been checkedfrom django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): if 'areas' in self.cleaned_data: for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs)
<commit_before>from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs) <commit_msg>Check that at least one area has been checked<commit_after>from django.forms import (ModelForm, CheckboxSelectMultiple, MultipleChoiceField) from .models import AuthorityElection, AuthorityElectionPosition class AuthorityAreaForm(ModelForm): def __init__(self, user, *args, **kwargs): super().__init__(*args, **kwargs) self.user = user # import ipdb; ipdb.set_trace(). self.fields['areas'] = MultipleChoiceField( choices=[ (a.pk, a.name) for a in self.instance.authority.child_areas], label="Wards", widget=CheckboxSelectMultiple ) class Meta: model = AuthorityElection fields = [] def clean(self, *args, **kwargs): if 'areas' in self.cleaned_data: for area in self.cleaned_data['areas']: AuthorityElectionPosition.objects.get_or_create( authority_election=self.instance, user=self.user, area_id=area ) return super().clean(*args, **kwargs)
1e5d68e8cbd592f1bd7535c74948f2bf5f95b4f1
mqtt_logger/tests.py
mqtt_logger/tests.py
from django.test import TestCase # Create your tests here.
from django.test import TestCase from models import * class SubscriptionTests(TestCase): @classmethod def setUpTestData(cls): sub = MQTTSubscription(server='localhost', topic='#') sub.save() cls.sub = sub def test_callback_function(self): sub = type(self).sub # Create a message to save class ToyMessage(): def __init__(self, topic, payload): self.topic = topic self.payload = payload test_topic = 'test' test_payload = '123abc' test_msg = ToyMessage(test_topic, test_payload) # Save the message using the callback function sub._record_message_callback(client=None, userdata=None, message=test_msg) # Get the newest message in the database and check for equality msg = MQTTMessage.objects.last() self.assertEqual(msg.topic, test_topic) self.assertEqual(msg.payload, test_payload)
Add test of callback function.
Add test of callback function.
Python
mit
ast0815/mqtt-hub,ast0815/mqtt-hub
from django.test import TestCase # Create your tests here. Add test of callback function.
from django.test import TestCase from models import * class SubscriptionTests(TestCase): @classmethod def setUpTestData(cls): sub = MQTTSubscription(server='localhost', topic='#') sub.save() cls.sub = sub def test_callback_function(self): sub = type(self).sub # Create a message to save class ToyMessage(): def __init__(self, topic, payload): self.topic = topic self.payload = payload test_topic = 'test' test_payload = '123abc' test_msg = ToyMessage(test_topic, test_payload) # Save the message using the callback function sub._record_message_callback(client=None, userdata=None, message=test_msg) # Get the newest message in the database and check for equality msg = MQTTMessage.objects.last() self.assertEqual(msg.topic, test_topic) self.assertEqual(msg.payload, test_payload)
<commit_before>from django.test import TestCase # Create your tests here. <commit_msg>Add test of callback function.<commit_after>
from django.test import TestCase from models import * class SubscriptionTests(TestCase): @classmethod def setUpTestData(cls): sub = MQTTSubscription(server='localhost', topic='#') sub.save() cls.sub = sub def test_callback_function(self): sub = type(self).sub # Create a message to save class ToyMessage(): def __init__(self, topic, payload): self.topic = topic self.payload = payload test_topic = 'test' test_payload = '123abc' test_msg = ToyMessage(test_topic, test_payload) # Save the message using the callback function sub._record_message_callback(client=None, userdata=None, message=test_msg) # Get the newest message in the database and check for equality msg = MQTTMessage.objects.last() self.assertEqual(msg.topic, test_topic) self.assertEqual(msg.payload, test_payload)
from django.test import TestCase # Create your tests here. Add test of callback function.from django.test import TestCase from models import * class SubscriptionTests(TestCase): @classmethod def setUpTestData(cls): sub = MQTTSubscription(server='localhost', topic='#') sub.save() cls.sub = sub def test_callback_function(self): sub = type(self).sub # Create a message to save class ToyMessage(): def __init__(self, topic, payload): self.topic = topic self.payload = payload test_topic = 'test' test_payload = '123abc' test_msg = ToyMessage(test_topic, test_payload) # Save the message using the callback function sub._record_message_callback(client=None, userdata=None, message=test_msg) # Get the newest message in the database and check for equality msg = MQTTMessage.objects.last() self.assertEqual(msg.topic, test_topic) self.assertEqual(msg.payload, test_payload)
<commit_before>from django.test import TestCase # Create your tests here. <commit_msg>Add test of callback function.<commit_after>from django.test import TestCase from models import * class SubscriptionTests(TestCase): @classmethod def setUpTestData(cls): sub = MQTTSubscription(server='localhost', topic='#') sub.save() cls.sub = sub def test_callback_function(self): sub = type(self).sub # Create a message to save class ToyMessage(): def __init__(self, topic, payload): self.topic = topic self.payload = payload test_topic = 'test' test_payload = '123abc' test_msg = ToyMessage(test_topic, test_payload) # Save the message using the callback function sub._record_message_callback(client=None, userdata=None, message=test_msg) # Get the newest message in the database and check for equality msg = MQTTMessage.objects.last() self.assertEqual(msg.topic, test_topic) self.assertEqual(msg.payload, test_payload)
e7e0b8b723382e7a187e02c6a3052dacefc84bbe
faaopendata/faa_data_cleaner.py
faaopendata/faa_data_cleaner.py
## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] writer.writerow(new_row_without_trailing_comma)
## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: # some of the data contains trailing spaces/tabs, so we remove those first new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] # write out a new CSV, which is then imported into Postgres writer.writerow(new_row_without_trailing_comma)
Add comments to explain what the script is doing on ingest
Add comments to explain what the script is doing on ingest
Python
apache-2.0
GISDev01/adsbpostgis
## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] writer.writerow(new_row_without_trailing_comma) Add comments to explain what the script is doing on ingest
## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: # some of the data contains trailing spaces/tabs, so we remove those first new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] # write out a new CSV, which is then imported into Postgres writer.writerow(new_row_without_trailing_comma)
<commit_before>## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] writer.writerow(new_row_without_trailing_comma) <commit_msg>Add comments to explain what the script is doing on ingest<commit_after>
## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: # some of the data contains trailing spaces/tabs, so we remove those first new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] # write out a new CSV, which is then imported into Postgres writer.writerow(new_row_without_trailing_comma)
## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] writer.writerow(new_row_without_trailing_comma) Add comments to explain what the script is doing on ingest## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: # some of the data contains trailing spaces/tabs, so we remove those first new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] # write out a new CSV, which is then imported into Postgres writer.writerow(new_row_without_trailing_comma)
<commit_before>## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] writer.writerow(new_row_without_trailing_comma) <commit_msg>Add comments to explain what the script is doing on ingest<commit_after>## A quick script to fix the ridiculous format of the raw FAA download data (at least it's available though!) import csv # MASTER.txt is from https://www.faa.gov/licenses_certificates/aircraft_certification/aircraft_registry/releasable_aircraft_download/ with open("MASTER.txt") as orig_file: orig_file_reader = csv.reader(orig_file, delimiter=",") with open("/temp/MASTER_CLEANED.csv", "w", newline='') as clean_file: writer = csv.writer(clean_file) for orig_record in orig_file_reader: # some of the data contains trailing spaces/tabs, so we remove those first new_row = [old_field_data.strip() for old_field_data in orig_record] # The data has a trailing comma on every single row (including the header row), so remove it as well new_row_without_trailing_comma = new_row[:-1] # write out a new CSV, which is then imported into Postgres writer.writerow(new_row_without_trailing_comma)
6d888061089648f2363f77f48fb7458a7ff16735
pyportify/serializers.py
pyportify/serializers.py
class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @staticmethod def from_spotify(self, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return Track(artist, name, track_id) @staticmethod def from_gpm(self, track): return Track( track.get("artist"), track.get("title"), track.get("storeId") )
class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @classmethod def from_spotify(cls, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return cls(artist, name, track_id) @classmethod def from_gpm(cls, track): return cls( track.get("artist"), track.get("title"), track.get("storeId") )
Change from_spotify and from_gpm to classmethods
Change from_spotify and from_gpm to classmethods
Python
apache-2.0
rckclmbr/pyportify,rckclmbr/pyportify,rckclmbr/pyportify,rckclmbr/pyportify
class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @staticmethod def from_spotify(self, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return Track(artist, name, track_id) @staticmethod def from_gpm(self, track): return Track( track.get("artist"), track.get("title"), track.get("storeId") ) Change from_spotify and from_gpm to classmethods
class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @classmethod def from_spotify(cls, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return cls(artist, name, track_id) @classmethod def from_gpm(cls, track): return cls( track.get("artist"), track.get("title"), track.get("storeId") )
<commit_before>class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @staticmethod def from_spotify(self, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return Track(artist, name, track_id) @staticmethod def from_gpm(self, track): return Track( track.get("artist"), track.get("title"), track.get("storeId") ) <commit_msg>Change from_spotify and from_gpm to classmethods<commit_after>
class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @classmethod def from_spotify(cls, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return cls(artist, name, track_id) @classmethod def from_gpm(cls, track): return cls( track.get("artist"), track.get("title"), track.get("storeId") )
class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @staticmethod def from_spotify(self, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return Track(artist, name, track_id) @staticmethod def from_gpm(self, track): return Track( track.get("artist"), track.get("title"), track.get("storeId") ) Change from_spotify and from_gpm to classmethodsclass Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @classmethod def from_spotify(cls, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return cls(artist, name, track_id) @classmethod def from_gpm(cls, track): return cls( track.get("artist"), track.get("title"), track.get("storeId") )
<commit_before>class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @staticmethod def from_spotify(self, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return Track(artist, name, track_id) @staticmethod def from_gpm(self, track): return Track( track.get("artist"), track.get("title"), track.get("storeId") ) <commit_msg>Change from_spotify and from_gpm to classmethods<commit_after>class Track(): artist = "" name = "" track_id = "" def __init__(self, artist, name, track_id=""): self.artist = artist self.name = name self.track_id = track_id @classmethod def from_spotify(cls, track): track_id = track.get("id") name = track.get("name") artist = "" if "artists" in track: artist = track["artists"][0]["name"] return cls(artist, name, track_id) @classmethod def from_gpm(cls, track): return cls( track.get("artist"), track.get("title"), track.get("storeId") )
1071e663eb38a3981cea047c1b2e24d6e119f94d
db/api/serializers.py
db/api/serializers.py
from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad') def get_norad(self, obj): return obj.satellite.norad_cat_id
from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad_cat_id = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad_cat_id') def get_norad_cat_id(self, obj): return obj.satellite.norad_cat_id
Change norad API field to proper name
Change norad API field to proper name
Python
agpl-3.0
Roboneet/satnogs-db,Roboneet/satnogs-db,Roboneet/satnogs-db,Roboneet/satnogs-db
from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad') def get_norad(self, obj): return obj.satellite.norad_cat_id Change norad API field to proper name
from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad_cat_id = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad_cat_id') def get_norad_cat_id(self, obj): return obj.satellite.norad_cat_id
<commit_before>from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad') def get_norad(self, obj): return obj.satellite.norad_cat_id <commit_msg>Change norad API field to proper name<commit_after>
from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad_cat_id = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad_cat_id') def get_norad_cat_id(self, obj): return obj.satellite.norad_cat_id
from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad') def get_norad(self, obj): return obj.satellite.norad_cat_id Change norad API field to proper namefrom rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad_cat_id = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad_cat_id') def get_norad_cat_id(self, obj): return obj.satellite.norad_cat_id
<commit_before>from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad') def get_norad(self, obj): return obj.satellite.norad_cat_id <commit_msg>Change norad API field to proper name<commit_after>from rest_framework import serializers from db.base.models import Satellite, Transponder class SatelliteSerializer(serializers.ModelSerializer): class Meta: model = Satellite class TransponderSerializer(serializers.ModelSerializer): norad_cat_id = serializers.SerializerMethodField() class Meta: model = Transponder fields = ('uuid', 'description', 'alive', 'uplink_low', 'uplink_high', 'downlink_low', 'downlink_high', 'mode', 'invert', 'baud', 'norad_cat_id') def get_norad_cat_id(self, obj): return obj.satellite.norad_cat_id
990de574769cf6b64dfc90128838e209af377ae0
sourcestats/collector/__main__.py
sourcestats/collector/__main__.py
# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') gevent.spawn(find) gevent.spawn(collect) gevent.spawn(index) try: gevent.wait() except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0)
# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') greenlets = [ gevent.spawn(find), gevent.spawn(collect), gevent.spawn(index) ] try: while True: # Get greenlets which have stopped (should be empty list) greenlet_states = [greenlet.ready() for greenlet in greenlets] greenlet_states = filter(lambda x: x, greenlet_states) # If we have any, something broke! if len(greenlet_states) > 0: break gevent.sleep(1) logger.critical('One of the greenlets stopped, exiting!') except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0)
Make the collector exit if one of its greenlets fails.
Make the collector exit if one of its greenlets fails.
Python
mit
Fizzadar/SourceServerStats,Fizzadar/SourceServerStats,Fizzadar/SourceServerStats
# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') gevent.spawn(find) gevent.spawn(collect) gevent.spawn(index) try: gevent.wait() except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0) Make the collector exit if one of its greenlets fails.
# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') greenlets = [ gevent.spawn(find), gevent.spawn(collect), gevent.spawn(index) ] try: while True: # Get greenlets which have stopped (should be empty list) greenlet_states = [greenlet.ready() for greenlet in greenlets] greenlet_states = filter(lambda x: x, greenlet_states) # If we have any, something broke! if len(greenlet_states) > 0: break gevent.sleep(1) logger.critical('One of the greenlets stopped, exiting!') except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0)
<commit_before># Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') gevent.spawn(find) gevent.spawn(collect) gevent.spawn(index) try: gevent.wait() except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0) <commit_msg>Make the collector exit if one of it's greenlets fails.<commit_after>
# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') greenlets = [ gevent.spawn(find), gevent.spawn(collect), gevent.spawn(index) ] try: while True: # Get greenlets which have stopped (should be empty list) greenlet_states = [greenlet.ready() for greenlet in greenlets] greenlet_states = filter(lambda x: x, greenlet_states) # If we have any, something broke! if len(greenlet_states) > 0: break gevent.sleep(1) logger.critical('One of the greenlets stopped, exiting!') except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0)
# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') gevent.spawn(find) gevent.spawn(collect) gevent.spawn(index) try: gevent.wait() except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0) Make the collector exit if one of its greenlets fails.# Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') greenlets = [ gevent.spawn(find), gevent.spawn(collect), gevent.spawn(index) ] try: while True: # Get greenlets which have stopped (should be empty list) greenlet_states = [greenlet.ready() for greenlet in greenlets] greenlet_states = filter(lambda x: x, greenlet_states) # If we have any, something broke! if len(greenlet_states) > 0: break gevent.sleep(1) logger.critical('One of the greenlets stopped, exiting!') except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0)
<commit_before># Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') gevent.spawn(find) gevent.spawn(collect) gevent.spawn(index) try: gevent.wait() except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0) <commit_msg>Make the collector exit if one of it's greenlets fails.<commit_after># Source Server Stats # File: sourcestats/collector/__main__.py # Desc: __main__ for the collector from gevent import monkey monkey.patch_all() import sys import logging import gevent from coloredlogs import ColoredStreamHandler from .. import logger from . import find, collect, index if __name__ == '__main__': logging.getLogger().setLevel(logging.CRITICAL) log_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO color_args = { 'show_timestamps': False, 'show_hostname': False } handler = ColoredStreamHandler(level=log_level, **color_args) logger.setLevel(log_level) logger.addHandler(handler) logger.info('Starting find, collect & index workers...') greenlets = [ gevent.spawn(find), gevent.spawn(collect), gevent.spawn(index) ] try: while True: # Get greenlets which have stopped (should be empty list) greenlet_states = [greenlet.ready() for greenlet in greenlets] greenlet_states = filter(lambda x: x, greenlet_states) # If we have any, something broke! if len(greenlet_states) > 0: break gevent.sleep(1) logger.critical('One of the greenlets stopped, exiting!') except KeyboardInterrupt: print 'Exiting upon user request...' raise SystemExit(0)
900f5fab722d32762b8a5fa214838f84b3fc376c
speech_recognition/__main__.py
speech_recognition/__main__.py
import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that")
import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() try: print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") except KeyboardInterrupt: pass
Handle Ctrl + C when running demo in the terminal.
Handle Ctrl + C when running demo in the terminal.
Python
bsd-3-clause
adrianzhang/speech_recognition,Python-Devs-Brasil/speech_recognition,Uberi/speech_recognition,Python-Devs-Brasil/speech_recognition,Zenohm/speech_recognition,jjsg/speech_recognition,Uberi/speech_recognition,adrianzhang/speech_recognition,Zenohm/speech_recognition,jjsg/speech_recognition,arvindch/speech_recognition,arvindch/speech_recognition
import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") Handle Ctrl + C when running demo in the terminal.
import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() try: print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") except KeyboardInterrupt: pass
<commit_before>import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") <commit_msg>Handle Ctrl + C when running demo in the terminal.<commit_after>
import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() try: print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") except KeyboardInterrupt: pass
import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") Handle Ctrl + C when running demo in the terminal.import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() try: print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") except KeyboardInterrupt: pass
<commit_before>import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") <commit_msg>Handle Ctrl + C when running demo in the terminal.<commit_after>import speech_recognition as sr r = sr.Recognizer() m = sr.Microphone() try: print("A moment of silence, please...") with m as source: r.adjust_for_ambient_noise(source) print("Set minimum energy threshold to {}".format(r.energy_threshold)) while True: print("Say something!") audio = r.listen(source) print("Got it! Now to recognize it...") try: value = r.recognize(audio) if str is bytes: # this version of Python uses bytes for strings (Python 2) print(u"You said {}".format(value).encode("utf-8")) else: # this version of Python uses unicode for strings (Python 3+) print("You said {}".format(value)) except LookupError: print("Oops! Didn't catch that") except KeyboardInterrupt: pass
61427695c0abb3c9385610a13c78cb21eec388d3
moa/factory_registers.py
moa/factory_registers.py
from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate')
from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('DigitalGateStage', module='moa.stage.gate') r('AnalogGateStage', module='moa.stage.gate')
Update factory registers with new classes.
Update factory registers with new classes.
Python
mit
matham/moa
from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') Update factory registers with new classes.
from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('DigitalGateStage', module='moa.stage.gate') r('AnalogGateStage', module='moa.stage.gate')
<commit_before>from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') <commit_msg>Update factory registers with new classes.<commit_after>
from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('DigitalGateStage', module='moa.stage.gate') r('AnalogGateStage', module='moa.stage.gate')
from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') Update factory registers with new classes.from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('DigitalGateStage', module='moa.stage.gate') r('AnalogGateStage', module='moa.stage.gate')
<commit_before>from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') <commit_msg>Update factory registers with new classes.<commit_after>from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('DigitalGateStage', module='moa.stage.gate') r('AnalogGateStage', module='moa.stage.gate')
1cc83f902fbfaaf571c15927aa633af361c47f78
aafig/setup.py
aafig/setup.py
# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='sphinxcontrib-aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/sphinxcontrib-aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], )
Rename package as sphinxcontrib-aafig * * * aafig: Fix package name in PYPI URL
aafig: Rename package as sphinxcontrib-aafig * * * aafig: Fix package name in PYPI URL
Python
bsd-2-clause
sphinx-contrib/spelling,sphinx-contrib/spelling
# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], ) aafig: Rename package as sphinxcontrib-aafig * * * aafig: Fix package name in PYPI URL
# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='sphinxcontrib-aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/sphinxcontrib-aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], )
<commit_before># -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], ) <commit_msg>aafig: Rename package as sphinxcontrib-aafig * * * aafig: Fix package name in PYPI URL<commit_after>
# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='sphinxcontrib-aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/sphinxcontrib-aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], ) aafig: Rename package as sphinxcontrib-aafig * * * aafig: Fix package name in PYPI URL# -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='sphinxcontrib-aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/sphinxcontrib-aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], )
<commit_before># -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], ) <commit_msg>aafig: Rename package as sphinxcontrib-aafig * * * aafig: Fix package name in PYPI URL<commit_after># -*- coding: utf-8 -*- from setuptools import setup, find_packages long_desc = ''' This package contains the aafigure Sphinx extension. Allow embeded ASCII art figure to be rendered as nice images. ''' requires = ['Sphinx>=0.6'] setup( name='sphinxcontrib-aafig', version='0.1', url='http://bitbucket.org/birkenfeld/sphinx-contrib', download_url='http://pypi.python.org/pypi/sphinxcontrib-aafig', license='BSD', author='Leandro Lucarella', author_email='llucax@gmail.com', description='Sphinx extension aafig', long_description=long_desc, zip_safe=False, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Documentation', 'Topic :: Utilities', ], platforms='any', packages=find_packages(), include_package_data=True, install_requires=requires, namespace_packages=['sphinxcontrib'], )
073d4d65edc93247c24f179ee93d061a0024057a
web/migrations/0001_initial.py
web/migrations/0001_initial.py
# Generated by Django 3.2.6 on 2021-08-23 18:33 # pylint:disable=line-too-long # pylint:disable=missing-module-docstring # pylint:disable= from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ]
# Generated by Django 3.2.6 on 2021-08-23 18:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ]
Remove pylint disables (for now)
Remove pylint disables (for now)
Python
agpl-3.0
codethesaurus/codethesaur.us,codethesaurus/codethesaur.us
# Generated by Django 3.2.6 on 2021-08-23 18:33 # pylint:disable=line-too-long # pylint:disable=missing-module-docstring # pylint:disable= from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ] Remove pylint disables (for now)
# Generated by Django 3.2.6 on 2021-08-23 18:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ]
<commit_before># Generated by Django 3.2.6 on 2021-08-23 18:33 # pylint:disable=line-too-long # pylint:disable=missing-module-docstring # pylint:disable= from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ] <commit_msg>Remove pylint disables (for now)<commit_after>
# Generated by Django 3.2.6 on 2021-08-23 18:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ]
# Generated by Django 3.2.6 on 2021-08-23 18:33 # pylint:disable=line-too-long # pylint:disable=missing-module-docstring # pylint:disable= from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ] Remove pylint disables (for now)# Generated by Django 3.2.6 on 2021-08-23 18:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ]
<commit_before># Generated by Django 3.2.6 on 2021-08-23 18:33 # pylint:disable=line-too-long # pylint:disable=missing-module-docstring # pylint:disable= from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ] <commit_msg>Remove pylint disables (for now)<commit_after># Generated by Django 3.2.6 on 2021-08-23 18:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SiteVisit', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('date_time', models.DateTimeField(auto_now_add=True)), ('url', models.URLField(max_length=300)), ('user_agent', models.CharField(max_length=300)), ('referer', models.CharField(max_length=300)), ], ), migrations.CreateModel( name='LookupData', fields=[ ('id', models.BigAutoField(primary_key=True, serialize=False)), ('language1', models.CharField(max_length=50)), ('language2', models.CharField(max_length=50)), ('structure', models.CharField(max_length=50)), ('site_visit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.sitevisit')), ], ), ]
71f00a03d6cbe4dc4d3cd2362ef91bd192a9a31e
who_broke_build.py
who_broke_build.py
import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )

import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )

if __name__ == '__main__':
    jenkins_wait_for_event()
Add point to execute program
Add point to execute program
Python
mit
mrteera/who-broke-build-slack,mrteera/who-broke-build-slack,zkan/who-broke-build-slack,prontodev/who-broke-build-slack,prontodev/who-broke-build-slack,zkan/who-broke-build-slack
import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )
Add point to execute program

import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )

if __name__ == '__main__':
    jenkins_wait_for_event()

<commit_before>import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )
<commit_msg>Add point to execute program<commit_after>

import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )

if __name__ == '__main__':
    jenkins_wait_for_event()

import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )
Add point to execute programimport json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )

if __name__ == '__main__':
    jenkins_wait_for_event()

<commit_before>import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )
<commit_msg>Add point to execute program<commit_after>import json
import re
import requests
import socket

import settings

def get_responsible_user(full_url):
    members = settings.TEAM_MEMBERS
    response = requests.get(
        full_url,
        auth=(
            settings.JENKINS_USERNAME,
            settings.JENKINS_PASSWORD
        )
    )
    for each in members:
        if ('Started by GitHub push by ' + each in response.content or \
                'Started by user ' + each in response.content):
            return each

def wait_for_event():
    return True

def jenkins_wait_for_event():
    sock = socket.socket(settings.AF_INET, settings.SOCK_DGRAM)
    sock.bind(('', settings.JENKINS_NOTIFICATION_UDP_PORT))
    while wait_for_event():
        data, _ = sock.recvfrom(8 * 1024)
        notification_data = json.loads(data)
        status = notification_data['build']['status'].upper()
        phase = notification_data['build']['phase'].upper()
        if phase == 'COMPLETED' and status.startswith('FAIL'):
            target = get_responsible_user(
                notification_data['build']['full_url']
            )

if __name__ == '__main__':
    jenkins_wait_for_event()
acfd2165c400b7318cea7c4e74db050968f9a123
api.py
api.py
import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)

import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

def register_resource(resource, schema):
    """Register a new resource with the given schema.

    .. note:: This method calls Flask's add_url_rule under the hood, which
        raises an AssertionError in debugging mode when used after the first
        request was served."""
    api.register_resource(resource, {'item_title': resource,
                                     'schema': schema,
                                     'resource_methods': ['GET', 'POST', 'DELETE']})

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)
Add utility method to register a resource
Add utility method to register a resource
Python
apache-2.0
gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa
import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)
Add utility method to register a resource

import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

def register_resource(resource, schema):
    """Register a new resource with the given schema.

    .. note:: This method calls Flask's add_url_rule under the hood, which
        raises an AssertionError in debugging mode when used after the first
        request was served."""
    api.register_resource(resource, {'item_title': resource,
                                     'schema': schema,
                                     'resource_methods': ['GET', 'POST', 'DELETE']})

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)

<commit_before>import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)
<commit_msg>Add utility method to register a resource<commit_after>

import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

def register_resource(resource, schema):
    """Register a new resource with the given schema.

    .. note:: This method calls Flask's add_url_rule under the hood, which
        raises an AssertionError in debugging mode when used after the first
        request was served."""
    api.register_resource(resource, {'item_title': resource,
                                     'schema': schema,
                                     'resource_methods': ['GET', 'POST', 'DELETE']})

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)

import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)
Add utility method to register a resourceimport json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

def register_resource(resource, schema):
    """Register a new resource with the given schema.

    .. note:: This method calls Flask's add_url_rule under the hood, which
        raises an AssertionError in debugging mode when used after the first
        request was served."""
    api.register_resource(resource, {'item_title': resource,
                                     'schema': schema,
                                     'resource_methods': ['GET', 'POST', 'DELETE']})

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)

<commit_before>import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)
<commit_msg>Add utility method to register a resource<commit_after>import json
from os import environ

from eve import Eve
from eve.io.mongo import Validator

from settings import API_NAME, URL_PREFIX

class KeySchemaValidator(Validator):
    def _validate_keyschema(self, schema, field, dct):
        "Validate all keys of dictionary `dct` against schema `schema`."
        for key, value in dct.items():
            self._validate_schema(schema, key, value)

api = Eve(API_NAME, validator=KeySchemaValidator)

def add_document(resource, document):
    "Add a new document to the given resource."
    return api.test_client().post('/' + URL_PREFIX + '/' + resource,
                                  data=json.dumps(document),
                                  content_type='application/json')

def delete_resource(resource):
    "Delete all documents of the given resource."
    return api.test_client().delete('/' + URL_PREFIX + '/' + resource)

def register_resource(resource, schema):
    """Register a new resource with the given schema.

    .. note:: This method calls Flask's add_url_rule under the hood, which
        raises an AssertionError in debugging mode when used after the first
        request was served."""
    api.register_resource(resource, {'item_title': resource,
                                     'schema': schema,
                                     'resource_methods': ['GET', 'POST', 'DELETE']})

if __name__ == '__main__':
    # Heroku support: bind to PORT if defined, otherwise default to 5000.
    if 'PORT' in environ:
        port = int(environ.get('PORT'))
        host = '0.0.0.0'
    else:
        port = 5000
        host = '127.0.0.1'
    api.run(host=host, port=port)
fdb7617539d63cd3522c41489817eb841374f525
django_gears/views.py
django_gears/views.py
import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    asset = build_asset(environment, normalized_path)
    if not asset:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response

import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset
from gears.exceptions import FileNotFound

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    try:
        asset = build_asset(environment, normalized_path)
    except FileNotFound:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response
Handle FileNotFound exception from build_asset
Handle FileNotFound exception from build_asset
Python
isc
juliomenendez/django-gears,juliomenendez/django-gears,wiserthanever/django-gears,juliomenendez/django-gears,gears/django-gears,juliomenendez/django-gears,wiserthanever/django-gears,gears/django-gears,wiserthanever/django-gears,wiserthanever/django-gears,gears/django-gears
import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    asset = build_asset(environment, normalized_path)
    if not asset:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response
Handle FileNotFound exception from build_asset

import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset
from gears.exceptions import FileNotFound

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    try:
        asset = build_asset(environment, normalized_path)
    except FileNotFound:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response

<commit_before>import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    asset = build_asset(environment, normalized_path)
    if not asset:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response
<commit_msg>Handle FileNotFound exception from build_asset<commit_after>

import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset
from gears.exceptions import FileNotFound

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    try:
        asset = build_asset(environment, normalized_path)
    except FileNotFound:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response

import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    asset = build_asset(environment, normalized_path)
    if not asset:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response
Handle FileNotFound exception from build_assetimport mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset
from gears.exceptions import FileNotFound

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    try:
        asset = build_asset(environment, normalized_path)
    except FileNotFound:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response

<commit_before>import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    asset = build_asset(environment, normalized_path)
    if not asset:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response
<commit_msg>Handle FileNotFound exception from build_asset<commit_after>import mimetypes
import posixpath
import urllib

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse

from gears.assets import build_asset
from gears.exceptions import FileNotFound

from .settings import environment

def serve(request, path, **kwargs):
    if not settings.DEBUG and not kwargs.get('insecure'):
        raise ImproperlyConfigured(
            "The gears view can only be used in debug mode or if the "
            "--insecure option of 'runserver' is used.")
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
    try:
        asset = build_asset(environment, normalized_path)
    except FileNotFound:
        return staticfiles_serve(request, path, **kwargs)
    mimetype, encoding = mimetypes.guess_type(normalized_path)
    mimetype = mimetype or 'application/octet-stream'
    response = HttpResponse(asset, mimetype=mimetype)
    if encoding:
        response['Content-Encoding'] = encoding
    return response
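A note on the pattern in the record just above: the commit replaces a truthiness check on build_asset's return value with catching gears' FileNotFound, the usual EAFP move when a helper stops returning None on a miss and starts raising. Below is a minimal, self-contained sketch of that shift; the FileNotFound class defined here, the ASSETS table, and serve_text are invented stand-ins for illustration, not part of django-gears.

class FileNotFound(Exception):
    """Stand-in for gears.exceptions.FileNotFound."""

ASSETS = {'js/app.js': 'console.log("hi");'}  # toy asset store

def build_asset(path):
    # Raising API: a miss surfaces as an exception, not a None return.
    try:
        return ASSETS[path]
    except KeyError:
        raise FileNotFound(path)

def serve_text(path):
    # EAFP: attempt the build and fall back only on the specific miss.
    try:
        return build_asset(path)
    except FileNotFound:
        return 'fallback for %s' % path

print(serve_text('js/app.js'))    # -> console.log("hi");
print(serve_text('css/missing'))  # -> fallback for css/missing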
966c466fd31c1d07d6b978fa4b26d6d068fd8b37
mysite/profile/management/commands/profile_hourly_tasks.py
mysite/profile/management/commands/profile_hourly_tasks.py
import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()

import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        rootLogger = logging.getLogger('')
        rootLogger.setLevel(logging.WARN)
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
Set default logging level to WARN.
Set default logging level to WARN.
Python
agpl-3.0
jledbetter/openhatch,heeraj123/oh-mainline,willingc/oh-mainline,vipul-sharma20/oh-mainline,moijes12/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,moijes12/oh-mainline,heeraj123/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,nirmeshk/oh-mainline,moijes12/oh-mainline,ojengwa/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,eeshangarg/oh-mainline,heeraj123/oh-mainline,openhatch/oh-mainline,Changaco/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,Changaco/oh-mainline,nirmeshk/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,jledbetter/openhatch,vipul-sharma20/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,jledbetter/openhatch,mzdaniel/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,willingc/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,eeshangarg/oh-mainline,Changaco/oh-mainline,nirmeshk/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,heeraj123/oh-mainline,openhatch/oh-mainline,openhatch/oh-mainline,mzdaniel/oh-mainline,willingc/oh-mainline,sudheesh001/oh-mainline,SnappleCap/oh-mainline,onceuponatimeforever/oh-mainline,Changaco/oh-mainline,SnappleCap/oh-mainline,mzdaniel/oh-mainline,waseem18/oh-mainline,ehashman/oh-mainline,campbe13/openhatch,waseem18/oh-mainline,ehashman/oh-mainline,heeraj123/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,mzdaniel/oh-mainline,willingc/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,vipul-sharma20/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline
import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
Set default logging level to WARN.

import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        rootLogger = logging.getLogger('')
        rootLogger.setLevel(logging.WARN)
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()

<commit_before>import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
<commit_msg>Set default logging level to WARN.<commit_after>

import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        rootLogger = logging.getLogger('')
        rootLogger.setLevel(logging.WARN)
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()

import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
Set default logging level to WARN.import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        rootLogger = logging.getLogger('')
        rootLogger.setLevel(logging.WARN)
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()

<commit_before>import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
<commit_msg>Set default logging level to WARN.<commit_after>import datetime
import logging

from django.core.management.base import BaseCommand

import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks

## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
    logging.info("Checking if bug epoch eclipsed the cached search epoch")
    cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
    bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
    if cache_time < bug_time:
        mysite.search.tasks.clear_search_cache()
        mysite.search.models.Epoch.bump_for_string('search_cache')
    logging.info("Finished dealing with bug epoch vs. cached search epoch.")

class Command(BaseCommand):
    help = "Run this once hourly for the OpenHatch profile app."

    def handle(self, *args, **options):
        rootLogger = logging.getLogger('')
        rootLogger.setLevel(logging.WARN)
        mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
        mysite.profile.tasks.fill_recommended_bugs_cache()
        # Every 4 hours, clear search cache
        if (datetime.datetime.utcnow().hour % 4) == 0:
            periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
cf12d0560e8eaedae054c3276857be84c425a89c
ir/__init__.py
ir/__init__.py
from aqt import mw

from .main import ReadingManager

__version__ = '4.3.1'

mw.readingManager = ReadingManager()

from aqt import mw

from .main import ReadingManager

mw.readingManager = ReadingManager()
Remove version number from init
Remove version number from init
Python
isc
luoliyan/incremental-reading-for-anki,luoliyan/incremental-reading-for-anki
from aqt import mw

from .main import ReadingManager

__version__ = '4.3.1'

mw.readingManager = ReadingManager()
Remove version number from init

from aqt import mw

from .main import ReadingManager

mw.readingManager = ReadingManager()

<commit_before>from aqt import mw

from .main import ReadingManager

__version__ = '4.3.1'

mw.readingManager = ReadingManager()
<commit_msg>Remove version number from init<commit_after>

from aqt import mw

from .main import ReadingManager

mw.readingManager = ReadingManager()

from aqt import mw

from .main import ReadingManager

__version__ = '4.3.1'

mw.readingManager = ReadingManager()
Remove version number from initfrom aqt import mw

from .main import ReadingManager

mw.readingManager = ReadingManager()

<commit_before>from aqt import mw

from .main import ReadingManager

__version__ = '4.3.1'

mw.readingManager = ReadingManager()
<commit_msg>Remove version number from init<commit_after>from aqt import mw

from .main import ReadingManager

mw.readingManager = ReadingManager()
f57f3a5a7abec6c9c6077c213cf29ef3fd9b4483
tools/test_sneeze.py
tools/test_sneeze.py
import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni"]

def test_from_namespace():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    fp = open(fname, 'w')
    fp.write('from nipype.interfaces.afni import To3d')
    fp.close()
    cover_pkg, module = find_pkg(fname)
    cmd = run_nose(cover_pkg, fname, dry_run=True)
    cmdlst = cmd.split()
    cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
    #print cmd
    assert_equal(cmd, 'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni')
    if os.path.exists(dname):
        rmtree(dname)

import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d, ThreeDRefit",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni",
                  "from nipype.interfaces import afni as af"]

def test_imports():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    for impt in import_strings:
        fp = open(fname, 'w')
        fp.write(impt)
        fp.close()
        cover_pkg, module = find_pkg(fname)
        cmd = run_nose(cover_pkg, fname, dry_run=True)
        cmdlst = cmd.split()
        cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
        yield assert_equal, cmd, \
            'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni'
    if os.path.exists(dname):
        rmtree(dname)
Add more tests for sneeze.
Add more tests for sneeze.
Python
bsd-3-clause
yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD
import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni"]

def test_from_namespace():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    fp = open(fname, 'w')
    fp.write('from nipype.interfaces.afni import To3d')
    fp.close()
    cover_pkg, module = find_pkg(fname)
    cmd = run_nose(cover_pkg, fname, dry_run=True)
    cmdlst = cmd.split()
    cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
    #print cmd
    assert_equal(cmd, 'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni')
    if os.path.exists(dname):
        rmtree(dname)
Add more tests for sneeze.

import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d, ThreeDRefit",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni",
                  "from nipype.interfaces import afni as af"]

def test_imports():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    for impt in import_strings:
        fp = open(fname, 'w')
        fp.write(impt)
        fp.close()
        cover_pkg, module = find_pkg(fname)
        cmd = run_nose(cover_pkg, fname, dry_run=True)
        cmdlst = cmd.split()
        cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
        yield assert_equal, cmd, \
            'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni'
    if os.path.exists(dname):
        rmtree(dname)

<commit_before>
import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni"]

def test_from_namespace():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    fp = open(fname, 'w')
    fp.write('from nipype.interfaces.afni import To3d')
    fp.close()
    cover_pkg, module = find_pkg(fname)
    cmd = run_nose(cover_pkg, fname, dry_run=True)
    cmdlst = cmd.split()
    cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
    #print cmd
    assert_equal(cmd, 'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni')
    if os.path.exists(dname):
        rmtree(dname)
<commit_msg>Add more tests for sneeze.<commit_after>

import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d, ThreeDRefit",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni",
                  "from nipype.interfaces import afni as af"]

def test_imports():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    for impt in import_strings:
        fp = open(fname, 'w')
        fp.write(impt)
        fp.close()
        cover_pkg, module = find_pkg(fname)
        cmd = run_nose(cover_pkg, fname, dry_run=True)
        cmdlst = cmd.split()
        cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
        yield assert_equal, cmd, \
            'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni'
    if os.path.exists(dname):
        rmtree(dname)

import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni"]

def test_from_namespace():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    fp = open(fname, 'w')
    fp.write('from nipype.interfaces.afni import To3d')
    fp.close()
    cover_pkg, module = find_pkg(fname)
    cmd = run_nose(cover_pkg, fname, dry_run=True)
    cmdlst = cmd.split()
    cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
    #print cmd
    assert_equal(cmd, 'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni')
    if os.path.exists(dname):
        rmtree(dname)
Add more tests for sneeze.
import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d, ThreeDRefit",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni",
                  "from nipype.interfaces import afni as af"]

def test_imports():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    for impt in import_strings:
        fp = open(fname, 'w')
        fp.write(impt)
        fp.close()
        cover_pkg, module = find_pkg(fname)
        cmd = run_nose(cover_pkg, fname, dry_run=True)
        cmdlst = cmd.split()
        cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
        yield assert_equal, cmd, \
            'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni'
    if os.path.exists(dname):
        rmtree(dname)

<commit_before>
import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni"]

def test_from_namespace():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    fp = open(fname, 'w')
    fp.write('from nipype.interfaces.afni import To3d')
    fp.close()
    cover_pkg, module = find_pkg(fname)
    cmd = run_nose(cover_pkg, fname, dry_run=True)
    cmdlst = cmd.split()
    cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
    #print cmd
    assert_equal(cmd, 'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni')
    if os.path.exists(dname):
        rmtree(dname)
<commit_msg>Add more tests for sneeze.<commit_after>
import os
from tempfile import mkdtemp
from shutil import rmtree

from nipy.testing import *

from sneeze import find_pkg, run_nose

import_strings = ["from nipype.interfaces.afni import To3d, ThreeDRefit",
                  "from nipype.interfaces import afni",
                  "import nipype.interfaces.afni",
                  "from nipype.interfaces import afni as af"]

def test_imports():
    dname = mkdtemp()
    fname = os.path.join(dname, 'test_afni.py')
    for impt in import_strings:
        fp = open(fname, 'w')
        fp.write(impt)
        fp.close()
        cover_pkg, module = find_pkg(fname)
        cmd = run_nose(cover_pkg, fname, dry_run=True)
        cmdlst = cmd.split()
        cmd = ' '.join(cmdlst[:4])  # strip off temporary directory path
        yield assert_equal, cmd, \
            'nosetests -sv --with-coverage --cover-package=nipype.interfaces.afni'
    if os.path.exists(dname):
        rmtree(dname)
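The rewrite in the record above turns one hard-coded assertion into a nose test generator that yields a check per import string. Here is a minimal sketch of that nose idiom under stated assumptions: the samples list and the normalize helper are invented stand-ins for the find_pkg/run_nose pipeline, and the yield-based style is specific to nose (pytest dropped support for it).

from nose.tools import assert_equal

samples = ["import os", "from os import path"]  # illustrative inputs

def normalize(stmt):
    # Toy stand-in for the processing under test in the record above.
    return ' '.join(stmt.split())

def test_normalize():
    # nose collects each yielded (callable, *args) tuple as its own test case,
    # so every sample gets reported separately on failure.
    for stmt in samples:
        yield assert_equal, normalize(stmt), stmt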
3f728b83eb407527588dd5a13a06cc5d1cd11df5
mfh.py
mfh.py
import mfhclient
import os
import Queue
import sys
import threading

import trigger
import update

def main():
    q = Queue.Queue()
    updateq = Queue.Queue()
    mfhclient_thread = threading.Thread(
        args=(q,),
        name="mfhclient_thread",
        target=mfhclient.main,
    )
    mfhclient_thread.start()
    trigger_thread = threading.Thread(
        args=(updateq,),
        name="trigger_thread",
        target=trigger.trigger,
    )
    trigger_thread.start()
    count = 0
    while True:
        if updateq.empty() and updateq.get() == "update":
            q.put("quit")
        if not mfhclient_thread.is_alive():
            updater_thread = threading.Thread(
                args=("origin", "master"),
                name="updater_thread",
                target=update.pull,
            )
            updater_thread.start()
            updater_thread.join()
            sys.stdout.flush()
            os.execl(sys.executable, sys.executable, *sys.argv)
        count += 1

if __name__ == '__main__':
    main()

import os
import sys
import time
from multiprocessing import Process, Event

import mfhclient
import update
from arguments import parse

def main():
    q = Event()
    mfhclient_process = Process(
        args=(args, q,),
        name="mfhclient_process",
        target=mfhclient.main,
    )
    mfhclient_process.start()
    trigger_process = Process(
        args=(q,),
        name="trigger_process",
        target=update.trigger,
    )
    trigger_process.start()
    trigger_process.join()
    while mfhclient_process.is_alive():
        time.sleep(5)
    else:
        update.pull("origin", "master")
        sys.stdout.flush()
        os.execl(sys.executable, sys.executable, *sys.argv)

if __name__ == '__main__':
    # Parse arguments
    args = parse()
    main()
Switch from multithreading to multiprocessing
Switch from multithreading to multiprocessing It is easier to work with processes in terms of the current task. The task was to make the client and updater work at the same time. Threads have a lot of limitations and are harder to get right, so I switched to processes. The code went pretty much straightforward. Not on the first try, though.
Python
mit
Zloool/manyfaced-honeypot
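The mfh.py rewrite swaps threading.Thread plus Queue for multiprocessing.Process plus a shared Event. A minimal, self-contained sketch of that parent/child handshake (the names here are illustrative, not taken from the repository):

import time
from multiprocessing import Process, Event

def worker(stop):
    # Spin until the parent signals shutdown via the shared Event.
    while not stop.is_set():
        time.sleep(0.1)

if __name__ == '__main__':
    stop = Event()
    child = Process(target=worker, args=(stop,), name="worker_process")
    child.start()
    stop.set()    # tell the child to exit
    child.join()  # block until it does, like trigger_process.join() above
    print(child.name, "alive?", child.is_alive())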
36a2051e8f0c36923d93e172d453ce0e6fe18512
src/tarsnapper/test.py
src/tarsnapper/test.py
from datetime import datetime

from expire import expire as default_expire_func


__all__ = ('BackupSimulator',)


try:
    from collections import OrderedDict   # Python 2.7
except ImportError:
    # Install from: http://pypi.python.org/pypi/ordereddict
    from ordereddict import OrderedDict


class BackupSimulator(object):
    """Helper to simulate making backups, and expire old ones,
    at various points in time.
    """

    def __init__(self, deltas, expire_func=default_expire_func):
        self.deltas = deltas
        self.expire_func = expire_func
        self.now = datetime.now()
        self.backups = OrderedDict()

    def go_to(self, dt):
        self.now = dt

    def go_by(self, td):
        self.now += td

    def backup(self, expire=True):
        self.backups[str(self.now)] = self.now
        if expire:
            return self.expire()

    def expire(self):
        keep = self.expire_func(self.backups, self.deltas)
        deleted = []
        for key in self.backups.keys():
            if not key in keep:
                deleted.append(key)
                del self.backups[key]
        return deleted, keep
from datetime import datetime

from expire import expire as default_expire_func
from config import parse_deltas


__all__ = ('BackupSimulator',)


try:
    from collections import OrderedDict   # Python 2.7
except ImportError:
    # Install from: http://pypi.python.org/pypi/ordereddict
    from ordereddict import OrderedDict


class BackupSimulator(object):
    """Helper to simulate making backups, and expire old ones,
    at various points in time.
    """

    def __init__(self, deltas, expire_func=default_expire_func):
        if isinstance(deltas, basestring):
            deltas = parse_deltas(deltas)
        self.deltas = deltas
        self.expire_func = expire_func
        self.now = datetime.now()
        self.backups = OrderedDict()

    def add(self, backups):
        for dt in backups:
            if isinstance(dt, basestring):
                dt = datetime.strptime(dt, "%Y%m%d-%H%M%S")
            self.backups[str(dt)] = dt

    def go_to(self, dt):
        self.now = dt

    def go_by(self, td):
        self.now += td

    def backup(self, expire=True):
        self.add([self.now])
        if expire:
            return self.expire()

    def expire(self):
        keep = self.expire_func(self.backups, self.deltas)
        deleted = []
        for key in self.backups.keys():
            if not key in keep:
                deleted.append(key)
                del self.backups[key]
        return deleted, keep
Allow using strings for deltas and dates.
Allow using strings for deltas and dates.
Python
bsd-2-clause
jyrkij/tarsnapper
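The BackupSimulator change accepts either pre-parsed values or strings and normalizes at the boundary. A small stand-alone sketch of the same normalize-on-entry idiom (Python 3's str in place of the basestring check used above):

from datetime import datetime

def to_datetime(value, fmt="%Y%m%d-%H%M%S"):
    # Accept a datetime or a string in the backup-name format,
    # and return a datetime either way.
    if isinstance(value, str):
        return datetime.strptime(value, fmt)
    return value

print(to_datetime("20240101-120000"))
print(to_datetime(datetime(2024, 1, 1, 12, 0)))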
64f68922cce077716b9b8ec3de0a04ee31b7b7a9
mkt/api/tests/test_base.py
mkt/api/tests/test_base.py
from tastypie import http
from tastypie.authorization import Authorization
from test_utils import RequestFactory

from amo.tests import TestCase

from mkt.api.base import MarketplaceResource


class TestMarketplace(TestCase):

    def setUp(self):
        self.resource = MarketplaceResource()
        self.resource._meta.authorization = Authorization()
        self.request = RequestFactory().post('/')

    def test_form_encoded(self):
        """
        Regression test of bug #858403: ensure that a 400 (and not 500)
        is raised when an unsupported Content-Type header is passed to
        an API endpoint.
        """
        self.request.META['CONTENT_TYPE'] = 'application/x-www-form-urlencoded'
        with self.assertImmediate(http.HttpBadRequest):
            self.resource.dispatch('list', self.request)
from tastypie import http
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
from test_utils import RequestFactory

from amo.tests import TestCase

from mkt.api.base import MarketplaceResource


class TestMarketplace(TestCase):

    def setUp(self):
        self.resource = MarketplaceResource()
        # Ensure that we have proper authentication and authorization.
        self.resource._meta.authentication = Authentication()
        self.resource._meta.authorization = Authorization()
        self.request = RequestFactory().post('/')

    def test_form_encoded(self):
        """
        Regression test of bug #858403: ensure that a 400 (and not 500)
        is raised when an unsupported Content-Type header is passed to
        an API endpoint.
        """
        self.request.META['CONTENT_TYPE'] = 'application/x-www-form-urlencoded'
        with self.assertImmediate(http.HttpBadRequest):
            self.resource.dispatch('list', self.request)
Fix failing TestMarketplace test by ensuring vanilla authentication and authorization
Fix failing TestMarketplace test by ensuring vanilla authentication and authorization
Python
bsd-3-clause
kumar303/addons-server,psiinon/addons-server,jasonthomas/zamboni,Witia1/olympia,Revanth47/addons-server,Joergen/zamboni,eviljeff/zamboni,jamesthechamp/zamboni,wagnerand/zamboni,clouserw/zamboni,mstriemer/addons-server,diox/olympia,Hitechverma/zamboni,clouserw/zamboni,Nolski/olympia,muffinresearch/addons-server,mozilla/zamboni,mozilla/zamboni,harry-7/addons-server,psiinon/addons-server,ayushagrawal288/zamboni,mudithkr/zamboni,bqbn/addons-server,koehlermichael/olympia,anaran/olympia,beni55/olympia,Joergen/zamboni,SuriyaaKudoIsc/olympia,kumar303/addons-server,harikishen/addons-server,Joergen/zamboni,wagnerand/addons-server,eviljeff/olympia,mdaif/olympia,eviljeff/zamboni,SuriyaaKudoIsc/olympia,yfdyh000/olympia,kumar303/olympia,mdaif/olympia,magopian/olympia,mozilla/addons-server,Revanth47/addons-server,Joergen/olympia,diox/olympia,harry-7/addons-server,wagnerand/addons-server,mozilla/addons-server,johancz/olympia,mstriemer/olympia,Hitechverma/zamboni,clouserw/zamboni,andymckay/addons-server,jamesthechamp/zamboni,andymckay/zamboni,jasonthomas/zamboni,spasovski/zamboni,mdaif/olympia,eviljeff/zamboni,ayushagrawal288/zamboni,mudithkr/zamboni,bqbn/addons-server,psiinon/addons-server,Nolski/olympia,anaran/olympia,beni55/olympia,Joergen/zamboni,kumar303/zamboni,kumar303/olympia,diox/zamboni,ddurst/zamboni,wagnerand/zamboni,mstriemer/zamboni,washort/zamboni,ngokevin/zamboni,Witia1/olympia,kumar303/zamboni,Hitechverma/zamboni,mstriemer/addons-server,yfdyh000/olympia,eviljeff/olympia,diox/olympia,kumar303/addons-server,mozilla/zamboni,diox/zamboni,robhudson/zamboni,luckylavish/zamboni,aviarypl/mozilla-l10n-addons-server,muffinresearch/olympia,magopian/olympia,clouserw/zamboni,tsl143/zamboni,jasonthomas/zamboni,spasovski/zamboni,tsl143/addons-server,Witia1/olympia,aviarypl/mozilla-l10n-addons-server,Prashant-Surya/addons-server,kumar303/zamboni,harikishen/addons-server,robhudson/zamboni,wagnerand/olympia,lavish205/olympia,kmaglione/olympia,jpetto/olympia,atiqueahmedziad/addons-server,washort/zamboni,muffinresearch/olympia,mstriemer/olympia,Witia1/olympia,muffinresearch/addons-server,wagnerand/zamboni,Joergen/olympia,lavish205/olympia,kmaglione/olympia,yfdyh000/olympia,lavish205/olympia,magopian/olympia,shahbaz17/zamboni,aviarypl/mozilla-l10n-addons-server,tsl143/addons-server,Hitechverma/zamboni,SuriyaaKudoIsc/olympia,johancz/olympia,jpetto/olympia,beni55/olympia,luckylavish/zamboni,Revanth47/addons-server,mudithkr/zamboni,harikishen/addons-server,luckylavish/zamboni,washort/zamboni,jamesthechamp/zamboni,ingenioustechie/zamboni,atiqueahmedziad/addons-server,crdoconnor/olympia,elysium001/zamboni,mozilla/olympia,ingenioustechie/zamboni,Joergen/zamboni,harry-7/addons-server,wagnerand/addons-server,muffinresearch/olympia,mrrrgn/olympia,ngokevin/zamboni,mrrrgn/olympia,wagnerand/olympia,johancz/olympia,elysium001/zamboni,mdaif/olympia,tsl143/zamboni,Revanth47/addons-server,spasovski/zamboni,mudithkr/zamboni,johancz/olympia,mozilla/olympia,mrrrgn/olympia,psiinon/addons-server,crdoconnor/olympia,tsl143/addons-server,andymckay/olympia,koehlermichael/olympia,magopian/olympia,mstriemer/zamboni,diox/zamboni,andymckay/addons-server,ingenioustechie/zamboni,anaran/olympia,andymckay/zamboni,andymckay/zamboni,tsl143/addons-server,Nolski/olympia,andymckay/addons-server,crdoconnor/olympia,Hitechverma/zamboni,muffinresearch/olympia,mozilla/olympia,kmaglione/olympia,bqbn/addons-server,crdoconnor/olympia,lavish205/olympia
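The fix pins both authentication and authorization on the resource under test instead of relying on whatever defaults the class ships with. A generic, dependency-free sketch of that test-isolation idea (all names here are hypothetical, not tastypie APIs):

class Resource(object):
    authentication = None   # collaborators a test may replace
    authorization = None

class AllowAll(object):
    def is_authenticated(self, request):
        return True

def make_test_resource():
    # Pin both collaborators explicitly, as the setUp above does, so the
    # test cannot break when the class's defaults change.
    resource = Resource()
    resource.authentication = AllowAll()
    resource.authorization = AllowAll()
    return resource

print(make_test_resource().authentication.is_authenticated(None))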
ea748053152c048f9ac763a4fb6b97a1815082df
wcsaxes/datasets/__init__.py
wcsaxes/datasets/__init__.py
"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits def msx_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/msx.fits", cache=cache) return fits.open(filename)[0] def rosat_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/rosat.fits", cache=cache) return fits.open(filename)[0] def twoMASS_k_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/2MASS_k.fits", cache=cache) return fits.open(filename)[0] def l1448_co_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/L1448_13CO_subset.fits", cache=cache) return fits.open(filename)[0] def bolocam_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/bolocam_v2.0.fits", cache=cache) return fits.open(filename)[0]
"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits URL = 'http://astrofrog.github.io/wcsaxes-datasets/' def get_hdu(filename, cache=True): path = download_file(URL + filename, cache=cache) return fits.open(path)[0] def msx_hdu(cache=True): return get_hdu('msx.fits', cache=cache) def rosat_hdu(cache=True): return get_hdu('rosat.fits', cache=cache) def twoMASS_k_hdu(cache=True): return get_hdu('2MASS_k.fits', cache=cache) def l1448_co_hdu(cache=True): return get_hdu('L1448_13CO_subset.fits', cache=cache) def bolocam_hdu(cache=True): return get_hdu('bolocam_v2.0.fits', cache=cache)
Simplify datasets functions a bit
Simplify datasets functions a bit
Python
bsd-3-clause
saimn/astropy,saimn/astropy,StuartLittlefair/astropy,dhomeier/astropy,astropy/astropy,stargaser/astropy,astropy/astropy,larrybradley/astropy,bsipocz/astropy,bsipocz/astropy,kelle/astropy,pllim/astropy,funbaker/astropy,dhomeier/astropy,mhvk/astropy,pllim/astropy,StuartLittlefair/astropy,astropy/astropy,dhomeier/astropy,astropy/astropy,kelle/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,saimn/astropy,joergdietrich/astropy,funbaker/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,astropy/astropy,saimn/astropy,tbabej/astropy,aleksandr-bakanov/astropy,pllim/astropy,stargaser/astropy,aleksandr-bakanov/astropy,tbabej/astropy,stargaser/astropy,saimn/astropy,DougBurke/astropy,kelle/astropy,tbabej/astropy,dhomeier/astropy,larrybradley/astropy,kelle/astropy,joergdietrich/astropy,lpsinger/astropy,DougBurke/astropy,tbabej/astropy,StuartLittlefair/astropy,joergdietrich/astropy,mhvk/astropy,DougBurke/astropy,AustereCuriosity/astropy,DougBurke/astropy,larrybradley/astropy,mhvk/astropy,AustereCuriosity/astropy,tbabej/astropy,AustereCuriosity/astropy,lpsinger/astropy,joergdietrich/astropy,bsipocz/astropy,mhvk/astropy,bsipocz/astropy,AustereCuriosity/astropy,larrybradley/astropy,AustereCuriosity/astropy,pllim/astropy,pllim/astropy,lpsinger/astropy,funbaker/astropy,MSeifert04/astropy,larrybradley/astropy,stargaser/astropy,StuartLittlefair/astropy,lpsinger/astropy,mhvk/astropy,kelle/astropy,funbaker/astropy,dhomeier/astropy,MSeifert04/astropy,StuartLittlefair/astropy,MSeifert04/astropy
"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits def msx_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/msx.fits", cache=cache) return fits.open(filename)[0] def rosat_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/rosat.fits", cache=cache) return fits.open(filename)[0] def twoMASS_k_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/2MASS_k.fits", cache=cache) return fits.open(filename)[0] def l1448_co_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/L1448_13CO_subset.fits", cache=cache) return fits.open(filename)[0] def bolocam_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/bolocam_v2.0.fits", cache=cache) return fits.open(filename)[0] Simplify datasets functions a bit
"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits URL = 'http://astrofrog.github.io/wcsaxes-datasets/' def get_hdu(filename, cache=True): path = download_file(URL + filename, cache=cache) return fits.open(path)[0] def msx_hdu(cache=True): return get_hdu('msx.fits', cache=cache) def rosat_hdu(cache=True): return get_hdu('rosat.fits', cache=cache) def twoMASS_k_hdu(cache=True): return get_hdu('2MASS_k.fits', cache=cache) def l1448_co_hdu(cache=True): return get_hdu('L1448_13CO_subset.fits', cache=cache) def bolocam_hdu(cache=True): return get_hdu('bolocam_v2.0.fits', cache=cache)
<commit_before>"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits def msx_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/msx.fits", cache=cache) return fits.open(filename)[0] def rosat_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/rosat.fits", cache=cache) return fits.open(filename)[0] def twoMASS_k_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/2MASS_k.fits", cache=cache) return fits.open(filename)[0] def l1448_co_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/L1448_13CO_subset.fits", cache=cache) return fits.open(filename)[0] def bolocam_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/bolocam_v2.0.fits", cache=cache) return fits.open(filename)[0] <commit_msg>Simplify datasets functions a bit<commit_after>
"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits URL = 'http://astrofrog.github.io/wcsaxes-datasets/' def get_hdu(filename, cache=True): path = download_file(URL + filename, cache=cache) return fits.open(path)[0] def msx_hdu(cache=True): return get_hdu('msx.fits', cache=cache) def rosat_hdu(cache=True): return get_hdu('rosat.fits', cache=cache) def twoMASS_k_hdu(cache=True): return get_hdu('2MASS_k.fits', cache=cache) def l1448_co_hdu(cache=True): return get_hdu('L1448_13CO_subset.fits', cache=cache) def bolocam_hdu(cache=True): return get_hdu('bolocam_v2.0.fits', cache=cache)
"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits def msx_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/msx.fits", cache=cache) return fits.open(filename)[0] def rosat_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/rosat.fits", cache=cache) return fits.open(filename)[0] def twoMASS_k_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/2MASS_k.fits", cache=cache) return fits.open(filename)[0] def l1448_co_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/L1448_13CO_subset.fits", cache=cache) return fits.open(filename)[0] def bolocam_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/bolocam_v2.0.fits", cache=cache) return fits.open(filename)[0] Simplify datasets functions a bit"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits URL = 'http://astrofrog.github.io/wcsaxes-datasets/' def get_hdu(filename, cache=True): path = download_file(URL + filename, cache=cache) return fits.open(path)[0] def msx_hdu(cache=True): return get_hdu('msx.fits', cache=cache) def rosat_hdu(cache=True): return get_hdu('rosat.fits', cache=cache) def twoMASS_k_hdu(cache=True): return get_hdu('2MASS_k.fits', cache=cache) def l1448_co_hdu(cache=True): return get_hdu('L1448_13CO_subset.fits', cache=cache) def bolocam_hdu(cache=True): return get_hdu('bolocam_v2.0.fits', cache=cache)
<commit_before>"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits def msx_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/msx.fits", cache=cache) return fits.open(filename)[0] def rosat_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/rosat.fits", cache=cache) return fits.open(filename)[0] def twoMASS_k_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/2MASS_k.fits", cache=cache) return fits.open(filename)[0] def l1448_co_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/L1448_13CO_subset.fits", cache=cache) return fits.open(filename)[0] def bolocam_hdu(cache=True): filename = download_file("http://astrofrog.github.io/wcsaxes-datasets/bolocam_v2.0.fits", cache=cache) return fits.open(filename)[0] <commit_msg>Simplify datasets functions a bit<commit_after>"""Downloads the FITS files that are used in image testing and for building documentation. """ from astropy.utils.data import download_file from astropy.io import fits URL = 'http://astrofrog.github.io/wcsaxes-datasets/' def get_hdu(filename, cache=True): path = download_file(URL + filename, cache=cache) return fits.open(path)[0] def msx_hdu(cache=True): return get_hdu('msx.fits', cache=cache) def rosat_hdu(cache=True): return get_hdu('rosat.fits', cache=cache) def twoMASS_k_hdu(cache=True): return get_hdu('2MASS_k.fits', cache=cache) def l1448_co_hdu(cache=True): return get_hdu('L1448_13CO_subset.fits', cache=cache) def bolocam_hdu(cache=True): return get_hdu('bolocam_v2.0.fits', cache=cache)
bf307c6fa8c7259d49da2888f91d6bce9fc921cd
src/idea/utility/state_helper.py
src/idea/utility/state_helper.py
from idea.models import State


def get_first_state():
    """ Get the first state for an idea. """
    return State.objects.get(previous__isnull=True)
from idea.models import State


def get_first_state():
    """ Get the first state for an idea. """
    #return State.objects.get(previous__isnull=True)
    # previous__isnull breaks functionality if someone creates a new state
    # without a previous state set. since we know the initial state
    # is id=1 per fixtures/state.json, use that instead.
    return State.objects.get(id=1)
Fix add_idea when multiple States have no previous
Fix add_idea when multiple States have no previous
Python
cc0-1.0
cfpb/idea-box,cfpb/idea-box,cfpb/idea-box
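The state_helper fix works around Django's QuerySet.get(), which demands exactly one match and starts raising as soon as a second State row has previous unset. A dependency-free sketch of that exactly-one contract:

def get_single(rows, predicate):
    # Like Django's QuerySet.get(): return exactly one match, or raise.
    matches = [row for row in rows if predicate(row)]
    if len(matches) != 1:
        raise ValueError("expected 1 match, found %d" % len(matches))
    return matches[0]

states = [{"id": 1, "previous": None}, {"id": 5, "previous": None}]
print(get_single(states, lambda s: s["id"] == 1))
# get_single(states, lambda s: s["previous"] is None) would raise here,
# which is the failure mode the commit avoids.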
0948e7a25e79b01dae3c5b6cf9b0c272e2d196b7
moviepy/video/fx/scroll.py
moviepy/video/fx/scroll.py
import numpy as np


def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
           x_start=0, y_start=0, apply_to="mask"):
    """ Scrolls horizontally or vertically a clip, e.g. to make end credits """
    if h is None: h = clip.h
    if w is None: w = clip.w
    xmax = clip.w-w-1
    ymax = clip.h-h-1

    def f(gf,t):
        x = max(0, min(xmax, x_start+ np.round(x_speed*t)))
        y = max(0, min(ymax, y_start+ np.round(y_speed*t)))
        return gf(t)[y:y+h, x:x+w]

    return clip.fl(f, apply_to = apply_to)
import numpy as np


def scroll(clip, h=None, w=None, x_speed=0, y_speed=0,
           x_start=0, y_start=0, apply_to="mask"):
    """ Scrolls horizontally or vertically a clip, e.g. to make end credits """
    if h is None: h = clip.h
    if w is None: w = clip.w
    xmax = clip.w-w-1
    ymax = clip.h-h-1

    def f(gf,t):
        x = int(max(0, min(xmax, x_start+ np.round(x_speed*t))))
        y = int(max(0, min(ymax, y_start+ np.round(y_speed*t))))
        return gf(t)[y:y+h, x:x+w]

    return clip.fl(f, apply_to = apply_to)
Add int() wrapper to prevent floats
Add int() wrapper to prevent floats
Python
mit
Zulko/moviepy,ssteo/moviepy,kerstin/moviepy
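The scroll() fix matters because np.round returns a float and NumPy rejects float slice bounds. A short reproduction (the exact exception type has varied across NumPy versions, hence the broad except):

import numpy as np

frame = np.zeros((10, 10))
y = max(0, min(9, np.round(1.5 * 2)))   # np.round(...) -> 3.0, a float
try:
    frame[y:y + 2, 0:2]
except Exception as exc:                # rejected on modern NumPy
    print("float index rejected:", exc)
print(frame[int(y):int(y) + 2, 0:2].shape)   # (2, 2) once wrapped in int()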
88e7859d3cca4a07265fc831c3ef6952af3e3d70
test/connect_remote/TestConnectRemote.py
test/connect_remote/TestConnectRemote.py
""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" @unittest2.expectedFailure def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
Add an expectedFailure decorator to the test_connect_remote() test case. It fails when running within the context of the test suite, but succeeds when running alone.
Add an expectedFailure decorator to the test_connect_remote() test case. It fails when running within the context of the test suite, but succeeds when running alone. git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@127290 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb
""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main() Add an expectedFailure decorator to the test_connect_remote() test case. It fails when running within the context of the test suite, but succeeds when running alone. git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@127290 91177308-0d34-0410-b5e6-96231b3b80d8
""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" @unittest2.expectedFailure def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
<commit_before>""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main() <commit_msg>Add an expectedFailure decorator to the test_connect_remote() test case. It fails when running within the context of the test suite, but succeeds when running alone. git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@127290 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" @unittest2.expectedFailure def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main() Add an expectedFailure decorator to the test_connect_remote() test case. It fails when running within the context of the test suite, but succeeds when running alone. git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@127290 91177308-0d34-0410-b5e6-96231b3b80d8""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" @unittest2.expectedFailure def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
<commit_before>""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main() <commit_msg>Add an expectedFailure decorator to the test_connect_remote() test case. It fails when running within the context of the test suite, but succeeds when running alone. git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@127290 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>""" Test lldb 'process connect' command. """ import os, time import unittest2 import lldb from lldbtest import * class ConnectRemoteTestCase(TestBase): mydir = "connect_remote" @unittest2.expectedFailure def test_connect_remote(self): """Test "process connect connect:://localhost:12345".""" # First, we'll start a fake debugserver (a simple echo server). import subprocess fakeserver = subprocess.Popen('./EchoServer.py') # This does the cleanup afterwards. def cleanup_fakeserver(): fakeserver.kill() fakeserver.wait() self.addTearDownHook(cleanup_fakeserver) self.runCmd("process connect connect://localhost:12345") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
9f13b732b68c62a90bbfd2f3fc9ad0c93d54f1e7
likes/utils.py
likes/utils.py
from likes.signals import likes_enabled_test, can_vote_test from likes.exceptions import LikesNotEnabledException, CannotVoteException def _votes_enabled(obj): """See if voting is enabled on the class. Made complicated because secretballot.enable_voting_on takes parameters to set attribute names, so we can't safely check for eg. "add_vote" presence on obj. The safest bet is to check for the 'votes' attribute. The correct approach is to contact the secretballot developers and ask them to set some unique marker on a class that can be voted on.""" return hasattr(obj.__class__, 'votes') def likes_enabled(obj, request): if not _votes_enabled(obj): return False, None try: likes_enabled_test.send(obj, request=request) except LikesNotEnabledException: return False return True def can_vote(obj, user, request): if not _votes_enabled(obj): return False try: can_vote_test.send(obj, user=user, request=request) except CannotVoteException: return False return True
from secretballot.models import Vote

from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False

    # Common predicate
    if Vote.objects.filter(
        object_id=obj.id,
        token=request.secretballot_token
    ).count() != 0:
        return False

    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
Add common predicate for can_vote
Add common predicate for can_vote
Python
bsd-3-clause
sandow-digital/django-likes,just-work/django-likes,Afnarel/django-likes,Afnarel/django-likes,just-work/django-likes,chuck211991/django-likes,Afnarel/django-likes,sandow-digital/django-likes,chuck211991/django-likes,chuck211991/django-likes,just-work/django-likes
from likes.signals import likes_enabled_test, can_vote_test from likes.exceptions import LikesNotEnabledException, CannotVoteException def _votes_enabled(obj): """See if voting is enabled on the class. Made complicated because secretballot.enable_voting_on takes parameters to set attribute names, so we can't safely check for eg. "add_vote" presence on obj. The safest bet is to check for the 'votes' attribute. The correct approach is to contact the secretballot developers and ask them to set some unique marker on a class that can be voted on.""" return hasattr(obj.__class__, 'votes') def likes_enabled(obj, request): if not _votes_enabled(obj): return False, None try: likes_enabled_test.send(obj, request=request) except LikesNotEnabledException: return False return True def can_vote(obj, user, request): if not _votes_enabled(obj): return False try: can_vote_test.send(obj, user=user, request=request) except CannotVoteException: return False return True Add common predicate for can_vote
from secretballot.models import Vote

from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False

    # Common predicate
    if Vote.objects.filter(
        object_id=obj.id,
        token=request.secretballot_token
    ).count() != 0:
        return False

    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
<commit_before>from likes.signals import likes_enabled_test, can_vote_test from likes.exceptions import LikesNotEnabledException, CannotVoteException def _votes_enabled(obj): """See if voting is enabled on the class. Made complicated because secretballot.enable_voting_on takes parameters to set attribute names, so we can't safely check for eg. "add_vote" presence on obj. The safest bet is to check for the 'votes' attribute. The correct approach is to contact the secretballot developers and ask them to set some unique marker on a class that can be voted on.""" return hasattr(obj.__class__, 'votes') def likes_enabled(obj, request): if not _votes_enabled(obj): return False, None try: likes_enabled_test.send(obj, request=request) except LikesNotEnabledException: return False return True def can_vote(obj, user, request): if not _votes_enabled(obj): return False try: can_vote_test.send(obj, user=user, request=request) except CannotVoteException: return False return True <commit_msg>Add common predicate for can_vote<commit_after>
from secretballot.models import Vote

from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False

    # Common predicate
    if Vote.objects.filter(
        object_id=obj.id,
        token=request.secretballot_token
    ).count() != 0:
        return False

    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False
    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
Add common predicate for can_votefrom secretballot.models import Vote

from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False

    # Common predicate
    if Vote.objects.filter(
        object_id=obj.id,
        token=request.secretballot_token
    ).count() != 0:
        return False

    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
<commit_before>from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False
    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
<commit_msg>Add common predicate for can_vote<commit_after>from secretballot.models import Vote

from likes.signals import likes_enabled_test, can_vote_test
from likes.exceptions import LikesNotEnabledException, CannotVoteException


def _votes_enabled(obj):
    """See if voting is enabled on the class. Made complicated because
    secretballot.enable_voting_on takes parameters to set attribute names,
    so we can't safely check for eg. "add_vote" presence on obj. The
    safest bet is to check for the 'votes' attribute.

    The correct approach is to contact the secretballot developers and ask
    them to set some unique marker on a class that can be voted on."""
    return hasattr(obj.__class__, 'votes')


def likes_enabled(obj, request):
    if not _votes_enabled(obj):
        return False, None
    try:
        likes_enabled_test.send(obj, request=request)
    except LikesNotEnabledException:
        return False
    return True


def can_vote(obj, user, request):
    if not _votes_enabled(obj):
        return False

    # Common predicate
    if Vote.objects.filter(
        object_id=obj.id,
        token=request.secretballot_token
    ).count() != 0:
        return False

    try:
        can_vote_test.send(obj, user=user, request=request)
    except CannotVoteException:
        return False
    return True
e2962b3888a2a82cff8f0f01a213c0a123873f60
application.py
application.py
#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5000, ['./json_schemas']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run()
#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5001, ['./mappings']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run()
Fix local search-api port to 5001
Fix local search-api port to 5001 When upgrading dmutils I've copied the new `init_manager` code from the API but forgot to update the port. Also adds mappings to the list of watched locations for the development server, so the app will restart if the files are modified.
Python
mit
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5000, ['./json_schemas']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run() Fix local search-api port to 5001 When upgrading dmutils I've copied the new `init_manager` code from the API but forgot to update the port. Also adds mappings to the list of watched locations for the development server, so the app will restart if the files are modified.
#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5001, ['./mappings']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run()
<commit_before>#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5000, ['./json_schemas']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run() <commit_msg>Fix local search-api port to 5001 When upgrading dmutils I've copied the new `init_manager` code from the API but forgot to update the port. Also adds mappings to the list of watched locations for the development server, so the app will restart if the files are modified.<commit_after>
#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5001, ['./mappings']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run()
#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5000, ['./json_schemas']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run() Fix local search-api port to 5001 When upgrading dmutils I've copied the new `init_manager` code from the API but forgot to update the port. Also adds mappings to the list of watched locations for the development server, so the app will restart if the files are modified.#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5001, ['./mappings']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run()
<commit_before>#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5000, ['./json_schemas']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run() <commit_msg>Fix local search-api port to 5001 When upgrading dmutils I've copied the new `init_manager` code from the API but forgot to update the port. Also adds mappings to the list of watched locations for the development server, so the app will restart if the files are modified.<commit_after>#!/usr/bin/env python import os from dmutils import init_manager from app import create_app application = create_app(os.getenv('DM_ENVIRONMENT') or 'development') manager = init_manager(application, 5001, ['./mappings']) @manager.command def update_index(index_name): from app.main.services.search_service import create_index with application.app_context(): message, status = create_index(index_name) assert status == 200, message application.logger.info("Created index %s", index_name) if __name__ == '__main__': manager.run()
d51b9786b1cc72dd01549a8547f06efc27aab4c3
tests/test_settings.py
tests/test_settings.py
from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = """ .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """ CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "doctitle_xform": False, # important, to have even lone titles stay in the html fragment "sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle 'file_insertion_enabled': False, # SECURITY MEASURE (file hacking) 'raw_enabled': False, # SECURITY MEASURE (script tag) 'smart_quotes': "alt"} #"'language_code': "fr" ## SEEMS BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ]
from textwrap import dedent from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = dedent(""" .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """) CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "smart_quotes": "alt"} #"'language_code': "fr" # weirdly seems BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ]
Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
Python
bsd-3-clause
pakal/cmsplugin-rst,ojii/cmsplugin-rst
from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = """ .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """ CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "doctitle_xform": False, # important, to have even lone titles stay in the html fragment "sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle 'file_insertion_enabled': False, # SECURITY MEASURE (file hacking) 'raw_enabled': False, # SECURITY MEASURE (script tag) 'smart_quotes': "alt"} #"'language_code': "fr" ## SEEMS BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ] Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.
from textwrap import dedent from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = dedent(""" .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """) CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "smart_quotes": "alt"} #"'language_code': "fr" # weirdly seems BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ]
<commit_before> from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = """ .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """ CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "doctitle_xform": False, # important, to have even lone titles stay in the html fragment "sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle 'file_insertion_enabled': False, # SECURITY MEASURE (file hacking) 'raw_enabled': False, # SECURITY MEASURE (script tag) 'smart_quotes': "alt"} #"'language_code': "fr" ## SEEMS BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ] <commit_msg>Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.<commit_after>
from textwrap import dedent from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = dedent(""" .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """) CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "smart_quotes": "alt"} #"'language_code': "fr" # weirdly seems BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ]
from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = """ .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """ CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "doctitle_xform": False, # important, to have even lone titles stay in the html fragment "sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle 'file_insertion_enabled': False, # SECURITY MEASURE (file hacking) 'raw_enabled': False, # SECURITY MEASURE (script tag) 'smart_quotes': "alt"} #"'language_code': "fr" ## SEEMS BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ] Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable. from textwrap import dedent from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = dedent(""" .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """) CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "smart_quotes": "alt"} #"'language_code': "fr" # weirdly seems BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ]
<commit_before> from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = """ .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """ CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "doctitle_xform": False, # important, to have even lone titles stay in the html fragment "sectsubtitle_xform": False, # we disable the promotion of the title of a lone subsection to a subtitle 'file_insertion_enabled': False, # SECURITY MEASURE (file hacking) 'raw_enabled': False, # SECURITY MEASURE (script tag) 'smart_quotes': "alt"} #"'language_code': "fr" ## SEEMS BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ] <commit_msg>Make test settings skippable via CMSPLUGIN_RST_SKIP_CONF environment variable.<commit_after> from textwrap import dedent from test_app.settings import * INSTALLED_APPS += ("cmsplugin_rst",) ## DJANGO CMSPLUGIN RST CONF ## if not os.environ.get("CMSPLUGIN_RST_SKIP_CONF"): # use this flag to test the zero-conf case CMSPLUGIN_RST_WRITER_NAME = "html4css1" CMSPLUGIN_RST_CONTENT_PREFIX = dedent(""" .. |nbsp| unicode:: 0xA0 :trim: *Global Prefix: Start of Content* """) CMSPLUGIN_RST_CONTENT_SUFFIX = \ """*Global Suffix: End of Content*""" CMSPLUGIN_RST_SETTINGS_OVERRIDES = {"initial_header_level": 2, # minimum "h2" when rendered to html "smart_quotes": "alt"} #"'language_code': "fr" # weirdly seems BROKEN! def add_stuffs_to_soup(soup): soup.div.append("""String Appended Via Beautifulsoup Postprocessor""") CMSPLUGIN_RST_POSTPROCESSORS = [ "test_settings.add_stuffs_to_soup" ]
ecab0066c8ecd63c1aae85ffd04b970539eae71b
genderator/utils.py
genderator/utils.py
from unidecode import unidecode class Normalizer: def normalize(text): text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) # text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): return unidecode(text)
from unidecode import unidecode class Normalizer: def normalize(text): """ Normalize a given text applying all normalizations. Params: text: The text to be processed. Returns: The text normalized. """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): """ Remove hyphens from input text. Params: text: The text to be processed. Returns: The text without hyphens. """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): """ Remove extra whitespaces from input text. This function removes whitespaces from the beginning and the end of the string, but also duplicated whitespaces between words. Params: text: The text to be processed. Returns: The text without extra whitespaces. """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): """ Remove accent marks from input text. Params: text: The text to be processed. Returns: The text without accent marks. """ return unidecode(text)
Add accent marks normalization and missing docstrings
Add accent marks normalization and missing docstrings
Python
mit
davidmogar/genderator
from unidecode import unidecode class Normalizer: def normalize(text): text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) # text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): return unidecode(text)Add accent marks normalization and missing docstrings
from unidecode import unidecode class Normalizer: def normalize(text): """ Normalize a given text applying all normalizations. Params: text: The text to be processed. Returns: The text normalized. """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): """ Remove hyphens from input text. Params: text: The text to be processed. Returns: The text without hyphens. """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): """ Remove extra whitespaces from input text. This function removes whitespaces from the beginning and the end of the string, but also duplicated whitespaces between words. Params: text: The text to be processed. Returns: The text without extra whitespaces. """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): """ Remove accent marks from input text. Params: text: The text to be processed. Returns: The text without accent marks. """ return unidecode(text)
<commit_before>from unidecode import unidecode class Normalizer: def normalize(text): text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) # text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): return unidecode(text)<commit_msg>Add accent marks normalization and missing docstrings<commit_after>
from unidecode import unidecode class Normalizer: def normalize(text): """ Normalize a given text applying all normalizations. Params: text: The text to be processed. Returns: The text normalized. """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): """ Remove hyphens from input text. Params: text: The text to be processed. Returns: The text without hyphens. """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): """ Remove extra whitespaces from input text. This function removes whitespaces from the beginning and the end of the string, but also duplicated whitespaces between words. Params: text: The text to be processed. Returns: The text without extra whitespaces. """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): """ Remove accent marks from input text. Params: text: The text to be processed. Returns: The text without accent marks. """ return unidecode(text)
from unidecode import unidecode class Normalizer: def normalize(text): text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) # text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): return unidecode(text)Add accent marks normalization and missing docstringsfrom unidecode import unidecode class Normalizer: def normalize(text): """ Normalize a given text applying all normalizations. Params: text: The text to be processed. Returns: The text normalized. """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): """ Remove hyphens from input text. Params: text: The text to be processed. Returns: The text without hyphens. """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): """ Remove extra whitespaces from input text. This function removes whitespaces from the beginning and the end of the string, but also duplicated whitespaces between words. Params: text: The text to be processed. Returns: The text without extra whitespaces. """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): """ Remove accent marks from input text. Params: text: The text to be processed. Returns: The text without accent marks. """ return unidecode(text)
<commit_before>from unidecode import unidecode class Normalizer: def normalize(text): text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) # text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): return unidecode(text)<commit_msg>Add accent marks normalization and missing docstrings<commit_after>from unidecode import unidecode class Normalizer: def normalize(text): """ Normalize a given text applying all normalizations. Params: text: The text to be processed. Returns: The text normalized. """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): """ Remove hyphens from input text. Params: text: The text to be processed. Returns: The text without hyphens. """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): """ Remove extra whitespaces from input text. This function removes whitespaces from the beginning and the end of the string, but also duplicated whitespaces between words. Params: text: The text to be processed. Returns: The text without extra whitespaces. """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): """ Remove accent marks from input text. Params: text: The text to be processed. Returns: The text without accent marks. """ return unidecode(text)
1a43628a42f249a69e0f8846ebbf88ef0af9bb9d
flow/__init__.py
flow/__init__.py
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
Make PersistenceSettings a top-level export
Make PersistenceSettings a top-level export
Python
mit
JohnVinyard/featureflow,JohnVinyard/featureflow
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase Make PersistenceSettings a top-level export
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
<commit_before>from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase <commit_msg>Make PersistenceSettings a top-level export<commit_after>
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase Make PersistenceSettings a top-level exportfrom model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
<commit_before>from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase <commit_msg>Make PersistenceSettings a top-level export<commit_after>from model import BaseModel from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \ PickleFeature from extractor import Node, Graph, Aggregator, NotEnoughData from bytestream import ByteStream, ByteStreamFeature from data import \ IdProvider, UuidProvider, UserSpecifiedIdProvider, KeyBuilder \ , StringDelimitedKeyBuilder, Database \ , FileSystemDatabase, InMemoryDatabase from datawriter import DataWriter from nmpy import StreamingNumpyDecoder, NumpyMetaData from database_iterator import DatabaseIterator from encoder import IdentityEncoder from decoder import Decoder from lmdbstore import LmdbDatabase from persistence import PersistenceSettings
4ac6905ee4867c038f0574c9c14827164c10f6a6
tools/unicode_tests.py
tools/unicode_tests.py
# coding: utf-8 """These tests have to be run separately from the main test suite (iptest), because that sets the default encoding to utf-8, and it cannot be changed after the interpreter is up and running. The default encoding in a Python 2.x environment is ASCII.""" import unittest, sys from IPython.core import compilerop assert sys.getdefaultencoding() == "ascii" class CompileropTest(unittest.TestCase): def test_accept_unicode(self): cp = compilerop.CachingCompiler() cp(u"t = 'žćčšđ'", "single") if __name__ == "__main__": unittest.main()
#!/usr/bin/env python
# coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest
import sys, os.path

import nose.tools as nt

from IPython.core import ipapi
from IPython.core import compilerop
from IPython.core.history import HistoryManager
from IPython.utils.tempdir import TemporaryDirectory

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

class HistoryTest(unittest.TestCase):
    def test_reload_unicode(self):
        ip = ipapi.get()
        with TemporaryDirectory() as tmpdir:
            histfile = os.path.realpath(os.path.join(tmpdir, 'history.json'))
            # Ensure that the history management we mess with in this test
            # doesn't affect the IPython instance used by the test suite
            # beyond this test.
            hist_manager_ori = ip.history_manager
            try:
                ip.history_manager = HistoryManager(ip)
                ip.history_manager.hist_file = histfile
                print 'test',histfile
                hist = [u"t = 'žćčšđ'"]
                # test save and load
                ip.history_manager.input_hist_raw[:] = []
                for h in hist:
                    ip.history_manager.input_hist_raw.append(h)
                ip.save_history()
                ip.history_manager.input_hist_raw[:] = []
                ip.reload_history()
                self.assertEqual(len(ip.history_manager.input_hist_raw), len(hist))
                for i,h in enumerate(hist):
                    nt.assert_equal(hist[i], ip.history_manager.input_hist_raw[i])
            finally:
                # Restore history manager
                ip.history_manager = hist_manager_ori

if __name__ == "__main__":
    unittest.main()
Add test for reloading history including unicode (currently fails).
Add test for reloading history including unicode (currently fails).
Python
bsd-3-clause
ipython/ipython,ipython/ipython
# coding: utf-8 """These tests have to be run separately from the main test suite (iptest), because that sets the default encoding to utf-8, and it cannot be changed after the interpreter is up and running. The default encoding in a Python 2.x environment is ASCII.""" import unittest, sys from IPython.core import compilerop assert sys.getdefaultencoding() == "ascii" class CompileropTest(unittest.TestCase): def test_accept_unicode(self): cp = compilerop.CachingCompiler() cp(u"t = 'žćčšđ'", "single") if __name__ == "__main__": unittest.main() Add test for reloading history including unicode (currently fails).
#!/usr/bin/env python
# coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest
import sys, os.path

import nose.tools as nt

from IPython.core import ipapi
from IPython.core import compilerop
from IPython.core.history import HistoryManager
from IPython.utils.tempdir import TemporaryDirectory

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

class HistoryTest(unittest.TestCase):
    def test_reload_unicode(self):
        ip = ipapi.get()
        with TemporaryDirectory() as tmpdir:
            histfile = os.path.realpath(os.path.join(tmpdir, 'history.json'))
            # Ensure that the history management we mess with in this test
            # doesn't affect the IPython instance used by the test suite
            # beyond this test.
            hist_manager_ori = ip.history_manager
            try:
                ip.history_manager = HistoryManager(ip)
                ip.history_manager.hist_file = histfile
                print 'test',histfile
                hist = [u"t = 'žćčšđ'"]
                # test save and load
                ip.history_manager.input_hist_raw[:] = []
                for h in hist:
                    ip.history_manager.input_hist_raw.append(h)
                ip.save_history()
                ip.history_manager.input_hist_raw[:] = []
                ip.reload_history()
                self.assertEqual(len(ip.history_manager.input_hist_raw), len(hist))
                for i,h in enumerate(hist):
                    nt.assert_equal(hist[i], ip.history_manager.input_hist_raw[i])
            finally:
                # Restore history manager
                ip.history_manager = hist_manager_ori

if __name__ == "__main__":
    unittest.main()
<commit_before># coding: utf-8 """These tests have to be run separately from the main test suite (iptest), because that sets the default encoding to utf-8, and it cannot be changed after the interpreter is up and running. The default encoding in a Python 2.x environment is ASCII.""" import unittest, sys from IPython.core import compilerop assert sys.getdefaultencoding() == "ascii" class CompileropTest(unittest.TestCase): def test_accept_unicode(self): cp = compilerop.CachingCompiler() cp(u"t = 'žćčšđ'", "single") if __name__ == "__main__": unittest.main() <commit_msg>Add test for reloading history including unicode (currently fails).<commit_after>
#!/usr/bin/env python
# coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest
import sys, os.path

import nose.tools as nt

from IPython.core import ipapi
from IPython.core import compilerop
from IPython.core.history import HistoryManager
from IPython.utils.tempdir import TemporaryDirectory

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

class HistoryTest(unittest.TestCase):
    def test_reload_unicode(self):
        ip = ipapi.get()
        with TemporaryDirectory() as tmpdir:
            histfile = os.path.realpath(os.path.join(tmpdir, 'history.json'))
            # Ensure that the history management we mess with in this test
            # doesn't affect the IPython instance used by the test suite
            # beyond this test.
            hist_manager_ori = ip.history_manager
            try:
                ip.history_manager = HistoryManager(ip)
                ip.history_manager.hist_file = histfile
                print 'test',histfile
                hist = [u"t = 'žćčšđ'"]
                # test save and load
                ip.history_manager.input_hist_raw[:] = []
                for h in hist:
                    ip.history_manager.input_hist_raw.append(h)
                ip.save_history()
                ip.history_manager.input_hist_raw[:] = []
                ip.reload_history()
                self.assertEqual(len(ip.history_manager.input_hist_raw), len(hist))
                for i,h in enumerate(hist):
                    nt.assert_equal(hist[i], ip.history_manager.input_hist_raw[i])
            finally:
                # Restore history manager
                ip.history_manager = hist_manager_ori

if __name__ == "__main__":
    unittest.main()
# coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest, sys

from IPython.core import compilerop

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

if __name__ == "__main__":
    unittest.main()
Add test for reloading history including unicode (currently fails).#!/usr/bin/env python
# coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest
import sys, os.path

import nose.tools as nt

from IPython.core import ipapi
from IPython.core import compilerop
from IPython.core.history import HistoryManager
from IPython.utils.tempdir import TemporaryDirectory

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

class HistoryTest(unittest.TestCase):
    def test_reload_unicode(self):
        ip = ipapi.get()
        with TemporaryDirectory() as tmpdir:
            histfile = os.path.realpath(os.path.join(tmpdir, 'history.json'))
            # Ensure that the history management we mess with in this test
            # doesn't affect the IPython instance used by the test suite
            # beyond this test.
            hist_manager_ori = ip.history_manager
            try:
                ip.history_manager = HistoryManager(ip)
                ip.history_manager.hist_file = histfile
                print 'test',histfile
                hist = [u"t = 'žćčšđ'"]
                # test save and load
                ip.history_manager.input_hist_raw[:] = []
                for h in hist:
                    ip.history_manager.input_hist_raw.append(h)
                ip.save_history()
                ip.history_manager.input_hist_raw[:] = []
                ip.reload_history()
                self.assertEqual(len(ip.history_manager.input_hist_raw), len(hist))
                for i,h in enumerate(hist):
                    nt.assert_equal(hist[i], ip.history_manager.input_hist_raw[i])
            finally:
                # Restore history manager
                ip.history_manager = hist_manager_ori

if __name__ == "__main__":
    unittest.main()
<commit_before># coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest, sys

from IPython.core import compilerop

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

if __name__ == "__main__":
    unittest.main()
<commit_msg>Add test for reloading history including unicode (currently fails).<commit_after>#!/usr/bin/env python
# coding: utf-8
"""These tests have to be run separately from the main test suite (iptest),
because that sets the default encoding to utf-8, and it cannot be changed
after the interpreter is up and running. The default encoding in a Python 2.x
environment is ASCII."""

import unittest
import sys, os.path

import nose.tools as nt

from IPython.core import ipapi
from IPython.core import compilerop
from IPython.core.history import HistoryManager
from IPython.utils.tempdir import TemporaryDirectory

assert sys.getdefaultencoding() == "ascii"

class CompileropTest(unittest.TestCase):
    def test_accept_unicode(self):
        cp = compilerop.CachingCompiler()
        cp(u"t = 'žćčšđ'", "single")

class HistoryTest(unittest.TestCase):
    def test_reload_unicode(self):
        ip = ipapi.get()
        with TemporaryDirectory() as tmpdir:
            histfile = os.path.realpath(os.path.join(tmpdir, 'history.json'))
            # Ensure that the history management we mess with in this test
            # doesn't affect the IPython instance used by the test suite
            # beyond this test.
            hist_manager_ori = ip.history_manager
            try:
                ip.history_manager = HistoryManager(ip)
                ip.history_manager.hist_file = histfile
                print 'test',histfile
                hist = [u"t = 'žćčšđ'"]
                # test save and load
                ip.history_manager.input_hist_raw[:] = []
                for h in hist:
                    ip.history_manager.input_hist_raw.append(h)
                ip.save_history()
                ip.history_manager.input_hist_raw[:] = []
                ip.reload_history()
                self.assertEqual(len(ip.history_manager.input_hist_raw), len(hist))
                for i,h in enumerate(hist):
                    nt.assert_equal(hist[i], ip.history_manager.input_hist_raw[i])
            finally:
                # Restore history manager
                ip.history_manager = hist_manager_ori

if __name__ == "__main__":
    unittest.main()
78aabbc9c66bc92fdedec740e32ad9fbd9ee8937
pygraphc/clustering/ConnectedComponents.py
pygraphc/clustering/ConnectedComponents.py
import networkx as nx class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, g): """This is a constructor for ConnectedComponent class Parameters ---------- g : graph a graph to be clustered """ self.g = g def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. Returns ------- clusters : list[list] List of cluster list, where each list contains index (line number) of event log. """ clusters = [] for components in nx.connected_components(self.g): clusters.append(components) cluster_id = 0 for cluster in clusters: for node in cluster: self.g.node[node]['cluster'] = cluster_id cluster_id += 1 return clusters
import networkx as nx from ClusterUtility import ClusterUtility class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, graph): """This is a constructor for ConnectedComponent class. Parameters ---------- graph : graph A graph to be clustered. """ self.graph = graph def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. This method heavily rely on the cosine similarity threshold to build an edge in a graph. Returns ------- clusters : dict[list] Dictionary of cluster list, where each list contains index (line number) of event log. """ clusters = {} cluster_id = 0 for components in nx.connected_components(self.graph): clusters[cluster_id] = components cluster_id += 1 ClusterUtility.set_cluster_id(self.graph, clusters) return clusters
Change cluster data structure from list to dict
Change cluster data structure from list to dict
Python
mit
studiawan/pygraphc
import networkx as nx class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, g): """This is a constructor for ConnectedComponent class Parameters ---------- g : graph a graph to be clustered """ self.g = g def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. Returns ------- clusters : list[list] List of cluster list, where each list contains index (line number) of event log. """ clusters = [] for components in nx.connected_components(self.g): clusters.append(components) cluster_id = 0 for cluster in clusters: for node in cluster: self.g.node[node]['cluster'] = cluster_id cluster_id += 1 return clusters Change cluster data structure from list to dict
import networkx as nx from ClusterUtility import ClusterUtility class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, graph): """This is a constructor for ConnectedComponent class. Parameters ---------- graph : graph A graph to be clustered. """ self.graph = graph def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. This method heavily rely on the cosine similarity threshold to build an edge in a graph. Returns ------- clusters : dict[list] Dictionary of cluster list, where each list contains index (line number) of event log. """ clusters = {} cluster_id = 0 for components in nx.connected_components(self.graph): clusters[cluster_id] = components cluster_id += 1 ClusterUtility.set_cluster_id(self.graph, clusters) return clusters
<commit_before>import networkx as nx class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, g): """This is a constructor for ConnectedComponent class Parameters ---------- g : graph a graph to be clustered """ self.g = g def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. Returns ------- clusters : list[list] List of cluster list, where each list contains index (line number) of event log. """ clusters = [] for components in nx.connected_components(self.g): clusters.append(components) cluster_id = 0 for cluster in clusters: for node in cluster: self.g.node[node]['cluster'] = cluster_id cluster_id += 1 return clusters <commit_msg>Change cluster data structure from list to dict<commit_after>
import networkx as nx from ClusterUtility import ClusterUtility class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, graph): """This is a constructor for ConnectedComponent class. Parameters ---------- graph : graph A graph to be clustered. """ self.graph = graph def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. This method heavily rely on the cosine similarity threshold to build an edge in a graph. Returns ------- clusters : dict[list] Dictionary of cluster list, where each list contains index (line number) of event log. """ clusters = {} cluster_id = 0 for components in nx.connected_components(self.graph): clusters[cluster_id] = components cluster_id += 1 ClusterUtility.set_cluster_id(self.graph, clusters) return clusters
import networkx as nx class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, g): """This is a constructor for ConnectedComponent class Parameters ---------- g : graph a graph to be clustered """ self.g = g def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. Returns ------- clusters : list[list] List of cluster list, where each list contains index (line number) of event log. """ clusters = [] for components in nx.connected_components(self.g): clusters.append(components) cluster_id = 0 for cluster in clusters: for node in cluster: self.g.node[node]['cluster'] = cluster_id cluster_id += 1 return clusters Change cluster data structure from list to dictimport networkx as nx from ClusterUtility import ClusterUtility class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, graph): """This is a constructor for ConnectedComponent class. Parameters ---------- graph : graph A graph to be clustered. """ self.graph = graph def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. This method heavily rely on the cosine similarity threshold to build an edge in a graph. Returns ------- clusters : dict[list] Dictionary of cluster list, where each list contains index (line number) of event log. """ clusters = {} cluster_id = 0 for components in nx.connected_components(self.graph): clusters[cluster_id] = components cluster_id += 1 ClusterUtility.set_cluster_id(self.graph, clusters) return clusters
<commit_before>import networkx as nx class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, g): """This is a constructor for ConnectedComponent class Parameters ---------- g : graph a graph to be clustered """ self.g = g def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. Returns ------- clusters : list[list] List of cluster list, where each list contains index (line number) of event log. """ clusters = [] for components in nx.connected_components(self.g): clusters.append(components) cluster_id = 0 for cluster in clusters: for node in cluster: self.g.node[node]['cluster'] = cluster_id cluster_id += 1 return clusters <commit_msg>Change cluster data structure from list to dict<commit_after>import networkx as nx from ClusterUtility import ClusterUtility class ConnectedComponents: """This is a class for connected component detection method to cluster event logs [1]_. References ---------- .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496. """ def __init__(self, graph): """This is a constructor for ConnectedComponent class. Parameters ---------- graph : graph A graph to be clustered. """ self.graph = graph def get_clusters(self): """This method find any connected component in a graph. A component represents a cluster and each component will be given a cluster identifier. This method heavily rely on the cosine similarity threshold to build an edge in a graph. Returns ------- clusters : dict[list] Dictionary of cluster list, where each list contains index (line number) of event log. """ clusters = {} cluster_id = 0 for components in nx.connected_components(self.graph): clusters[cluster_id] = components cluster_id += 1 ClusterUtility.set_cluster_id(self.graph, clusters) return clusters
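The core of this record is a data-structure change: clusters move from a bare list of components to a dict keyed by cluster id. A minimal sketch of the resulting shape, using only networkx (the `ClusterUtility.set_cluster_id` helper from the commit is not reproduced here, and the three-node graph is invented for illustration):

    import networkx as nx

    # Hypothetical log graph: lines 0 and 1 passed the similarity threshold, line 2 did not.
    g = nx.Graph()
    g.add_nodes_from([0, 1, 2])
    g.add_edge(0, 1)

    # Cluster id -> set of event-log line indices, e.g. {0: {0, 1}, 1: {2}}.
    clusters = {cid: component for cid, component in enumerate(nx.connected_components(g))}
    print(clusters)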
a39b1e480499c19eee1e7f341964f7c94d6cdbad
st2auth_ldap_backend/__init__.py
st2auth_ldap_backend/__init__.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1'
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from .ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1'
Fix code so it works under Python 3.
Fix code so it works under Python 3.
Python
apache-2.0
StackStorm/st2-auth-backend-ldap
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1' Fix code so it works under Python 3.
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from .ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1'
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1' <commit_msg>Fix code so it works under Python 3.<commit_after>
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from .ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1'
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1' Fix code so it works under Python 3.# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from .ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1'
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1' <commit_msg>Fix code so it works under Python 3.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from .ldap_backend import LDAPAuthenticationBackend __all__ = [ 'LDAPAuthenticationBackend' ] __version__ = '0.1.1'
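The fix in this record is the standard Python 3 porting move: implicit relative imports are gone, so an intra-package sibling module must be imported with an explicit leading dot, and `from __future__ import absolute_import` gives Python 2 the same semantics. A sketch with an invented package layout (`mypkg/backend.py` is hypothetical):

    # mypkg/__init__.py
    from __future__ import absolute_import  # Python 2: disable implicit relative imports

    # Python 2 only -- resolved the sibling module implicitly, ImportError on Python 3:
    #     from backend import Backend

    # Portable -- explicit relative import of mypkg/backend.py:
    from .backend import Backend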
51dcf283fda1492d8e75da2c036848ab4149030d
imagekit/conf.py
imagekit/conf.py
from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' return value
from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: if getattr(settings, 'CACHES', None): value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' else: value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND return value
Fix default cache backend for Django < 1.3
Fix default cache backend for Django < 1.3
Python
bsd-3-clause
tawanda/django-imagekit,FundedByMe/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit
from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' return value Fix default cache backend for Django < 1.3
from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: if getattr(settings, 'CACHES', None): value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' else: value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND return value
<commit_before>from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' return value <commit_msg>Fix default cache backend for Django < 1.3<commit_after>
from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: if getattr(settings, 'CACHES', None): value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' else: value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND return value
from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' return value Fix default cache backend for Django < 1.3from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: if getattr(settings, 'CACHES', None): value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' else: value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND return value
<commit_before>from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' return value <commit_msg>Fix default cache backend for Django < 1.3<commit_after>from appconf import AppConf from django.conf import settings class ImageKitConf(AppConf): CACHEFILE_NAMER = 'imagekit.cachefiles.namers.hash' SPEC_CACHEFILE_NAMER = 'imagekit.cachefiles.namers.source_name_as_path' CACHEFILE_DIR = 'CACHE/images' DEFAULT_CACHEFILE_BACKEND = 'imagekit.cachefiles.backends.Simple' DEFAULT_CACHEFILE_STRATEGY = 'imagekit.cachefiles.strategies.JustInTime' DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' CACHE_BACKEND = None CACHE_PREFIX = 'imagekit:' def configure_cache_backend(self, value): if value is None: if getattr(settings, 'CACHES', None): value = 'django.core.cache.backends.dummy.DummyCache' if settings.DEBUG else 'default' else: value = 'dummy://' if settings.DEBUG else settings.CACHE_BACKEND return value
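The branch added here detects which cache-settings API the running Django exposes: `CACHES` (a dict of named aliases, introduced in Django 1.3) or the older single `CACHE_BACKEND` URI string. A hypothetical standalone helper mirroring that logic, with the settings object passed in so the sketch does not depend on a configured Django:

    def pick_cache_backend(settings, debug):
        """Return a cache alias/URI valid for the detected Django settings style."""
        if getattr(settings, 'CACHES', None):
            # Django >= 1.3: named aliases; use a dummy backend while debugging.
            return 'django.core.cache.backends.dummy.DummyCache' if debug else 'default'
        # Django < 1.3: single URI string such as 'locmem://'.
        return 'dummy://' if debug else settings.CACHE_BACKEND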
53a1c2ccbd43a8fcb90d8d9b7814c8ed129c0635
thumbor_botornado/s3_http_loader.py
thumbor_botornado/s3_http_loader.py
from botornado.s3.bucket import AsyncBucket from botornado.s3.connection import AsyncS3Connection from botornado.s3.key import AsyncKey from thumbor_botornado.s3_loader import S3Loader from thumbor.loaders.http_loader import HttpLoader def load(context, url, callback): p = re.compile('/^https?:/i') m = p.match(url) if m: HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback)
import thumbor_botornado.s3_loader as S3Loader import thumbor.loaders.http_loader as HttpLoader import re def load(context, url, callback): if re.match('https?:', url, re.IGNORECASE): HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback)
Add s3-http loader which delegates based on the uri
Add s3-http loader which delegates based on the uri
Python
mit
99designs/thumbor_botornado,Jimdo/thumbor_botornado,99designs/thumbor_botornado,Jimdo/thumbor_botornado
from botornado.s3.bucket import AsyncBucket from botornado.s3.connection import AsyncS3Connection from botornado.s3.key import AsyncKey from thumbor_botornado.s3_loader import S3Loader from thumbor.loaders.http_loader import HttpLoader def load(context, url, callback): p = re.compile('/^https?:/i') m = p.match(url) if m: HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback) Add s3-http loader which delegates based on the uri
import thumbor_botornado.s3_loader as S3Loader import thumbor.loaders.http_loader as HttpLoader import re def load(context, url, callback): if re.match('https?:', url, re.IGNORECASE): HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback)
<commit_before>from botornado.s3.bucket import AsyncBucket from botornado.s3.connection import AsyncS3Connection from botornado.s3.key import AsyncKey from thumbor_botornado.s3_loader import S3Loader from thumbor.loaders.http_loader import HttpLoader def load(context, url, callback): p = re.compile('/^https?:/i') m = p.match(url) if m: HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback) <commit_msg>Add s3-http loader which delegates based on the uri<commit_after>
import thumbor_botornado.s3_loader as S3Loader import thumbor.loaders.http_loader as HttpLoader import re def load(context, url, callback): if re.match('https?:', url, re.IGNORECASE): HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback)
from botornado.s3.bucket import AsyncBucket from botornado.s3.connection import AsyncS3Connection from botornado.s3.key import AsyncKey from thumbor_botornado.s3_loader import S3Loader from thumbor.loaders.http_loader import HttpLoader def load(context, url, callback): p = re.compile('/^https?:/i') m = p.match(url) if m: HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback) Add s3-http loader which delegates based on the uriimport thumbor_botornado.s3_loader as S3Loader import thumbor.loaders.http_loader as HttpLoader import re def load(context, url, callback): if re.match('https?:', url, re.IGNORECASE): HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback)
<commit_before>from botornado.s3.bucket import AsyncBucket from botornado.s3.connection import AsyncS3Connection from botornado.s3.key import AsyncKey from thumbor_botornado.s3_loader import S3Loader from thumbor.loaders.http_loader import HttpLoader def load(context, url, callback): p = re.compile('/^https?:/i') m = p.match(url) if m: HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback) <commit_msg>Add s3-http loader which delegates based on the uri<commit_after>import thumbor_botornado.s3_loader as S3Loader import thumbor.loaders.http_loader as HttpLoader import re def load(context, url, callback): if re.match('https?:', url, re.IGNORECASE): HttpLoader.load(context, url, callback) else: S3Loader.load(context, url, callback)
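Besides the delegation itself, this record quietly fixes two bugs in the old loader: `re` was never imported, and `'/^https?:/i'` is a JavaScript regex literal, not a Python pattern. The replacement idiom in isolation (loader names are placeholders):

    import re

    def pick_loader(url):
        # re.match already anchors at the start of the string, so no '^' is needed,
        # and re.IGNORECASE replaces the JavaScript-style trailing '/i' flag.
        return 'http' if re.match('https?:', url, re.IGNORECASE) else 's3'

    assert pick_loader('HTTPS://example.com/a.png') == 'http'
    assert pick_loader('some-bucket/key/a.png') == 's3'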
be83b28d5a2dedd8caa39cfac57f398af69b2042
piper/utils.py
piper/utils.py
class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name)
class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): if isinstance(other, dict): # If we are comparing to a dict, just check directly return self.data == other return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name)
Make dict comparisons possible for DotDicts
Make dict comparisons possible for DotDicts
Python
mit
thiderman/piper
class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name) Make dict comparisons possible for DotDicts
class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): if isinstance(other, dict): # If we are comparing to a dict, just check directly return self.data == other return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name)
<commit_before>class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name) <commit_msg>Make dict comparisons possible for DotDicts<commit_after>
class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): if isinstance(other, dict): # If we are comparing to a dict, just check directly return self.data == other return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name)
class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name) Make dict comparisons possible for DotDictsclass DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): if isinstance(other, dict): # If we are comparing to a dict, just check directly return self.data == other return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name)
<commit_before>class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name) <commit_msg>Make dict comparisons possible for DotDicts<commit_after>class DotDict(object): """ Immutable dict-like objects accessible by dot notation Used because the amount of configuration access is very high and just using dots instead of the dict notation feels good. """ def __init__(self, data): self.data = data def __repr__(self): # pragma: nocover return '<DotDict {}>'.format(self.data) def __getattr__(self, key): if key in ('values', 'keys', 'items'): # Dict methods, just return and run them. return getattr(self.data, key) val = self.data[key] if isinstance(val, dict): val = DotDict(val) return val def __eq__(self, other): if isinstance(other, dict): # If we are comparing to a dict, just check directly return self.data == other return self.data == other.data # So that we can still access as dicts __getitem__ = __getattr__ def dynamic_load(target): """ Dynamically import a class and return it This is used by the core parts of the main configuration file since one of the main features is to let the user specify which class to use. """ split = target.split('.') module_name = '.'.join(split[:-1]) class_name = split[-1] mod = __import__(module_name, fromlist=[class_name]) return getattr(mod, class_name)
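The added `isinstance(other, dict)` branch lets a DotDict compare equal to a plain dict directly, instead of raising AttributeError when the right-hand side has no `.data`. A quick illustration, assuming the DotDict class from this record is in scope:

    d = DotDict({'env': {'name': 'test'}})

    assert d == {'env': {'name': 'test'}}           # new: compare against a plain dict
    assert d == DotDict({'env': {'name': 'test'}})  # old path: compare .data to .data
    assert d.env.name == 'test'                     # dot access still wraps nested dicts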
e331e49d39b6e60f953a982c90f800cd07dfbc2d
libptp/tools/wapiti/signatures.py
libptp/tools/wapiti/signatures.py
""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, }
""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, 'CRLF': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, }
Add one signature for wapiti 2.2.1
[wapiti] Add one signature for wapiti 2.2.1
Python
bsd-3-clause
DoomTaper/ptp,owtf/ptp
""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, } [wapitit] Add one signature for wapiti 2.2.1
""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, 'CRLF': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, }
<commit_before>""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, } <commit_msg>[wapitit] Add one signature for wapiti 2.2.1<commit_after>
""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, 'CRLF': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, }
""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, } [wapitit] Add one signature for wapiti 2.2.1""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, 'CRLF': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, }
<commit_before>""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, } <commit_msg>[wapitit] Add one signature for wapiti 2.2.1<commit_after>""" Wapiti does not provide ranking for the vulnerabilities it has found. This file tries to define a ranking for every vulnerability Wapiti might find. """ from libptp.constants import HIGH, MEDIUM, LOW, INFO SIGNATURES = { # High ranked vulnerabilities 'SQL Injection': HIGH, 'Blind SQL Injection': HIGH, 'Command execution': HIGH, # Medium ranked vulnerabilities 'Htaccess Bypass': MEDIUM, 'Cross Site Scripting': MEDIUM, 'CRLF Injection': MEDIUM, 'CRLF': MEDIUM, # Low ranked vulnerabilities 'File Handling': LOW, # a.k.a Path or Directory listing 'Resource consumption': LOW, # TODO: Is this higher than LOW? # Informational ranked vulnerabilities 'Backup file': INFO, 'Potentially dangerous file': INFO, # TODO: Is this higher than INFO? 'Internal Server Error': INFO, }
cca106b4cb647e82838deb359cf6f9ef813992a9
dbaas/integrations/credentials/admin/integration_credential.py
dbaas/integrations/credentials/admin/integration_credential.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("user","endpoint",) save_on_top = True
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("endpoint","user",) save_on_top = True
Change field order at integration credential admin index page
Change field order at integration credential admin index page
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

from django.contrib import admin


class IntegrationCredentialAdmin(admin.ModelAdmin):
    search_fields = ("endpoint",)
    list_display = ("user","endpoint",)
    save_on_top = True
Change field order at integration credential admin index page
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("endpoint","user",) save_on_top = True
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("user","endpoint",) save_on_top = True<commit_msg>Change field order at integration credential admin index page<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("endpoint","user",) save_on_top = True
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

from django.contrib import admin


class IntegrationCredentialAdmin(admin.ModelAdmin):
    search_fields = ("endpoint",)
    list_display = ("user","endpoint",)
    save_on_top = True
Change field order at integration credential admin index page
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

from django.contrib import admin


class IntegrationCredentialAdmin(admin.ModelAdmin):
    search_fields = ("endpoint",)
    list_display = ("endpoint","user",)
    save_on_top = True
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("user","endpoint",) save_on_top = True<commit_msg>Change field order at integration credential admin index page<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("endpoint","user",) save_on_top = True
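A one-line change, but it affects more than column order: by default Django's changelist links the first `list_display` entry to the change page (`list_display_links` falls back to the first column), so `endpoint` becomes both the leading and the clickable column. The same pattern on a hypothetical admin:

    from django.contrib import admin

    class ExampleCredentialAdmin(admin.ModelAdmin):
        search_fields = ("endpoint",)
        # First entry is shown first and, absent list_display_links, is the link.
        list_display = ("endpoint", "user")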
97cc8b9dd87d43e38d6ff2a20dc4cab2ffcc3d54
tests/test_requests.py
tests/test_requests.py
import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): contract = ibi.Stock('MMM', 'SMART', 'USD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): contract = ibi.Forex('EURUSD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
Use EURUSD for test order.
Use EURUSD for test order.
Python
bsd-2-clause
erdewit/ib_insync,erdewit/ib_insync
import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Stock('MMM', 'SMART', 'USD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)

Use EURUSD for test order.
import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Forex('EURUSD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)
<commit_before>import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Stock('MMM', 'SMART', 'USD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)
<commit_msg>Use EURUSD for test order.<commit_after>
import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Forex('EURUSD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)
import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Stock('MMM', 'SMART', 'USD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)

Use EURUSD for test order.
import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Forex('EURUSD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)
<commit_before>import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Stock('MMM', 'SMART', 'USD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)
<commit_msg>Use EURUSD for test order.<commit_after>import pytest

import ib_insync as ibi

pytestmark = pytest.mark.asyncio


async def test_request_error_raised(ib):
    contract = ibi.Forex('EURUSD')
    order = ibi.MarketOrder('BUY', 100)
    orderState = await ib.whatIfOrderAsync(contract, order)
    assert orderState.commission > 0

    ib.RaiseRequestErrors = True
    badContract = ibi.Stock('XXX')
    with pytest.raises(ibi.RequestError) as exc_info:
        await ib.whatIfOrderAsync(badContract, order)
    assert exc_info.value.code == 321


async def test_account_summary(ib):
    summary = await ib.accountSummaryAsync()
    assert summary
    assert all(isinstance(value, ibi.AccountValue) for value in summary)
99609e3643c320484c7978440ffedd892f8bb088
Toolkit/PlayPen/xscale_2_hkl.py
Toolkit/PlayPen/xscale_2_hkl.py
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        if s[j] >= 0.0:
            print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
Exclude reflections with -ve sigma (needed when reading XDS_ASCII.HKL as input)
Exclude reflections with -ve sigma (needed when reading XDS_ASCII.HKL as input)
Python
bsd-3-clause
xia2/xia2,xia2/xia2
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)

Exclude reflections with -ve sigma (needed when reading XDS_ASCII.HKL as input)
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        if s[j] >= 0.0:
            print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
<commit_before>from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
<commit_msg>Exclude reflections with -ve sigma (needed when reading XDS_ASCII.HKL as input)<commit_after>
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        if s[j] >= 0.0:
            print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)

Exclude reflections with -ve sigma (needed when reading XDS_ASCII.HKL as input)
from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        if s[j] >= 0.0:
            print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
<commit_before>from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
<commit_msg>Exclude reflections with -ve sigma (needed when reading XDS_ASCII.HKL as input)<commit_after>from iotbx.xds.read_ascii import reader
import sys

for n, argv in enumerate(sys.argv[1:]):
    r = reader(open(argv))
    mas = r.as_miller_arrays(merge_equivalents = False)
    assert(len(mas) == 1)
    ma = mas[0].apply_scaling(target_max = 9.99e5)
    i = ma.data()
    s = ma.sigmas()
    hkl = ma.indices()
    for j, h in enumerate(hkl):
        _i = ('%f' % i[j])[:7]
        assert('.' in _i)
        _s = ('%f' % s[j])[:7]
        assert('.' in _s)
        if s[j] >= 0.0:
            print '%4d%4d%4d%8s%8s%4d' % (h[0], h[1], h[2], _i, _s, n + 1)
06bed92f55396c33a4f55c109fdb7871737fea7a
query/forms.py
query/forms.py
""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) )
""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'autofocus': 'autofocus', 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) )
Add HTML5 autofocus attribute to query form field.
Add HTML5 autofocus attribute to query form field.
Python
mit
cdubz/rdap-explorer,cdubz/rdap-explorer
""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) ) Add HTML5 autofocus attribute to query form field.
""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'autofocus': 'autofocus', 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) )
<commit_before>""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) ) <commit_msg>Add HTML5 autofocus attribute to query form field.<commit_after>
""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'autofocus': 'autofocus', 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) )
""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) ) Add HTML5 autofocus attribute to query form field.""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'autofocus': 'autofocus', 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) )
<commit_before>""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) ) <commit_msg>Add HTML5 autofocus attribute to query form field.<commit_after>""" Forms for the rdap_explorer project, query app. """ from django import forms class QueryForm(forms.Form): query = forms.CharField( label='', max_length=45, # Max length of an IPv6 address. widget=forms.TextInput(attrs={ 'autofocus': 'autofocus', 'class': 'form-input input-lg', # TODO: Move this in theme! 'placeholder': 'IPv4/6 address' }) )
e09d60380f626502532e78494314f9ed97eca7c8
build-cutline-map.py
build-cutline-map.py
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(26915)

cutline_srs = osr.SpatialReference()
cutline_srs.ImportFromEPSG(4326)

coord_trans = osr.CoordinateTransformation(cutline_srs, srs)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        poly.Transform(coord_trans)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
Put cutline map in 26915
Put cutline map in 26915
Python
mit
simonsonc/mn-glo-mosaic,simonsonc/mn-glo-mosaic,simonsonc/mn-glo-mosaic
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()

Put cutline map in 26915
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(26915)

cutline_srs = osr.SpatialReference()
cutline_srs.ImportFromEPSG(4326)

coord_trans = osr.CoordinateTransformation(cutline_srs, srs)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        poly.Transform(coord_trans)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
<commit_before>#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
<commit_msg>Put cutline map in 26915<commit_after>
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(26915)

cutline_srs = osr.SpatialReference()
cutline_srs.ImportFromEPSG(4326)

coord_trans = osr.CoordinateTransformation(cutline_srs, srs)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        poly.Transform(coord_trans)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()

Put cutline map in 26915
#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(26915)

cutline_srs = osr.SpatialReference()
cutline_srs.ImportFromEPSG(4326)

coord_trans = osr.CoordinateTransformation(cutline_srs, srs)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        poly.Transform(coord_trans)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
<commit_before>#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
<commit_msg>Put cutline map in 26915<commit_after>#!/usr/bin/env python

from osgeo import ogr
from osgeo import osr
from glob import glob
import os.path

driver = ogr.GetDriverByName("ESRI Shapefile")
ds = driver.CreateDataSource("summary-maps/cutline-map.shp")

srs = osr.SpatialReference()
srs.ImportFromEPSG(26915)

cutline_srs = osr.SpatialReference()
cutline_srs.ImportFromEPSG(4326)

coord_trans = osr.CoordinateTransformation(cutline_srs, srs)

layer = ds.CreateLayer("tiles", srs, ogr.wkbPolygon)
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(16)
layer.CreateField(field_name)

for fn in glob("cutlines/*.json"):
    tile_id = os.path.splitext(os.path.basename(fn))[0]
    cutline_ds = ogr.Open(fn)
    cutline_layer = cutline_ds.GetLayerByIndex(0)
    cutline_feature = cutline_layer.GetNextFeature()
    while cutline_feature:
        poly = cutline_feature.GetGeometryRef().Clone()
        poly.Transform(coord_trans)
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", tile_id)
        feature.SetGeometry(poly)
        layer.CreateFeature(feature)
        feature.Destroy()
        cutline_feature = cutline_layer.GetNextFeature()

ds.Destroy()
6d7910deebeb68e12c7d7f721c54ada031560024
src/WhiteLibrary/keywords/items/textbox.py
src/WhiteLibrary/keywords/items/textbox.py
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input_value):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input_value`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input_value

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
Change to better argument name
Change to better argument name
Python
apache-2.0
Omenia/robotframework-whitelibrary,Omenia/robotframework-whitelibrary
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text

Change to better argument name
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input_value):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input_value`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input_value

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
<commit_before>from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
<commit_msg>Change to better argument name<commit_after>
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input_value):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input_value`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input_value

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text

Change to better argument name
from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input_value):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input_value`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input_value

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
<commit_before>from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text
<commit_msg>Change to better argument name<commit_after>from TestStack.White.UIItems import TextBox

from WhiteLibrary.keywords.librarycomponent import LibraryComponent
from WhiteLibrary.keywords.robotlibcore import keyword


class TextBoxKeywords(LibraryComponent):
    @keyword
    def input_text_to_textbox(self, locator, input_value):
        """ Writes text to a textbox.

        ``locator`` is the locator of the text box.

        ``input_value`` is the text to write.
        """
        textBox = self.state._get_typed_item_by_locator(TextBox, locator)
        textBox.Text = input_value

    @keyword
    def verify_text_in_textbox(self, locator, expected):
        """ Verifies text in a text box.

        ``locator`` is the locator of the text box.

        ``expected`` is the expected text of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        self.state._verify_value(expected, textbox.Text)

    @keyword
    def get_text_from_textbox(self, locator):
        """ Gets text from text box.

        ``locator`` is the locator of the text box.
        """
        textbox = self.state._get_typed_item_by_locator(TextBox, locator)
        return textbox.Text