Dataset schema:

| column          | dtype  | lengths / classes   |
|-----------------|--------|---------------------|
| commit          | string | lengths 40 to 40    |
| old_file        | string | lengths 4 to 118    |
| new_file        | string | lengths 4 to 118    |
| old_contents    | string | lengths 0 to 2.94k  |
| new_contents    | string | lengths 1 to 4.43k  |
| subject         | string | lengths 15 to 444   |
| message         | string | lengths 16 to 3.45k |
| lang            | string | 1 class             |
| license         | string | 13 classes          |
| repos           | string | lengths 5 to 43.2k  |
| prompt          | string | lengths 17 to 4.58k |
| response        | string | lengths 1 to 4.43k  |
| prompt_tagged   | string | lengths 58 to 4.62k |
| response_tagged | string | lengths 1 to 4.43k  |
| text            | string | lengths 132 to 7.29k |
| text_tagged     | string | lengths 173 to 7.33k |
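The rows below follow this schema. The derived `prompt`, `response`, and `text` columns are concatenations of the base code and message fields, and the `*_tagged` variants wrap them in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. As a quick aid for working with records of this shape, here is a minimal sketch using the Hugging Face `datasets` library; the repository id is hypothetical (the dump does not name the dataset), so substitute the actual path.

```python
# Minimal sketch: load a dataset with the schema above and inspect one record.
# The repository id is hypothetical -- replace it with the real dataset path.
from datasets import load_dataset

ds = load_dataset("example-org/commit-message-pairs", split="train")
print(ds.column_names)  # should match the schema table above

row = ds[0]
print(row["commit"], row["old_file"], row["license"])

# The tagged prompt wraps the old file contents and the commit message in
# control markers:
#   prompt_tagged = "<commit_before>" + old_contents
#                   + "<commit_msg>" + message + "<commit_after>"
assert row["prompt_tagged"].startswith("<commit_before>")
```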
commit: 40af69656b71cda7f775cface3478106f070ed35
old_file: numba/__init__.py
new_file: numba/__init__.py
old_contents:

```python
import sys
import logging

# NOTE: Be sure to keep the logging level commented out before commiting. See:
# https://github.com/numba/numba/issues/31
# A good work around is to make your tests handle a debug flag, per
# numba.tests.test_support.main().
logging.basicConfig(#level=logging.DEBUG,
                    format="\n\033[1m%(levelname)s -- %(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s")

try:
    from . import minivect
except ImportError:
    print(logging.error("Did you forget to update submodule minivect?"))
    print(logging.error("Run 'git submodule init' followed by 'git submodule update'"))
    raise

from . import _numba_types
from ._numba_types import *
from . import decorators
from .decorators import *


def test():
    raise Exception("run nosetests from the numba directory")

__all__ = _numba_types.__all__ + decorators.__all__
```
new_contents:

```python
import sys
import logging

# NOTE: Be sure to keep the logging level commented out before commiting. See:
# https://github.com/numba/numba/issues/31
# A good work around is to make your tests handle a debug flag, per
# numba.tests.test_support.main().


class _RedirectingHandler(logging.Handler):
    '''
    A log hanlder that applies its formatter and redirect the emission
    to a parent handler.
    '''

    def set_handler(self, handler):
        self.handler = handler

    def emit(self, record):
        # apply our own formatting
        record.msg = self.format(record)
        record.args = []  # clear the args
        # use parent handler to emit record
        self.handler.emit(record)


def _config_logger():
    root = logging.getLogger(__name__)
    format = "\n\033[1m%(levelname)s -- "\
             "%(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s"
    try:
        parent_hldr = root.parent.handlers[0]
    except IndexError:  # parent handler is not initialized?
        # build our own handler --- uses sys.stderr by default.
        parent_hldr = logging.StreamHandler()
    hldr = _RedirectingHandler()
    hldr.set_handler(parent_hldr)
    fmt = logging.Formatter(format)
    hldr.setFormatter(fmt)
    root.addHandler(hldr)
    root.propagate = False  # do not propagate to the root logger

_config_logger()

try:
    from . import minivect
except ImportError:
    print(logging.error("Did you forget to update submodule minivect?"))
    print(logging.error("Run 'git submodule init' followed by 'git submodule update'"))
    raise

from . import _numba_types
from ._numba_types import *
from . import decorators
from .decorators import *


def test():
    raise Exception("run nosetests from the numba directory")

__all__ = _numba_types.__all__ + decorators.__all__
```
subject: Update logging facility. Don't overide root logger with basicConfig.
message: Update logging facility. Don't overide root logger with basicConfig.
lang: Python
license: bsd-2-clause
repos: numba/numba,stefanseefeld/numba,pitrou/numba,stonebig/numba,ssarangi/numba,ssarangi/numba,seibert/numba,gmarkall/numba,seibert/numba,pombredanne/numba,sklam/numba,shiquanwang/numba,stefanseefeld/numba,cpcloud/numba,gmarkall/numba,numba/numba,jriehl/numba,cpcloud/numba,seibert/numba,jriehl/numba,jriehl/numba,pitrou/numba,GaZ3ll3/numba,pitrou/numba,IntelLabs/numba,IntelLabs/numba,seibert/numba,jriehl/numba,gdementen/numba,sklam/numba,GaZ3ll3/numba,stefanseefeld/numba,shiquanwang/numba,gmarkall/numba,GaZ3ll3/numba,pitrou/numba,stonebig/numba,gmarkall/numba,gmarkall/numba,ssarangi/numba,seibert/numba,stefanseefeld/numba,stuartarchibald/numba,gdementen/numba,numba/numba,cpcloud/numba,GaZ3ll3/numba,stefanseefeld/numba,gdementen/numba,stuartarchibald/numba,stonebig/numba,pombredanne/numba,stuartarchibald/numba,sklam/numba,ssarangi/numba,pitrou/numba,numba/numba,stuartarchibald/numba,shiquanwang/numba,pombredanne/numba,stonebig/numba,pombredanne/numba,sklam/numba,cpcloud/numba,sklam/numba,stonebig/numba,IntelLabs/numba,gdementen/numba,jriehl/numba,IntelLabs/numba,pombredanne/numba,stuartarchibald/numba,IntelLabs/numba,ssarangi/numba,numba/numba,GaZ3ll3/numba,cpcloud/numba,gdementen/numba
commit: 4aa9e487cceccf81608dfb4ac9a1f85587298415
old_file: edit.py
new_file: edit.py
old_contents:

```python
# request handler for editing page (/edit) goes here
# assume admin login has already been handled

import cgi

from google.appengine.api import users
import webapp2


class DateHandler(webapp2.RequestHandler):
    def get(self):
        template_values = {
        }
        template = jinja_environment.get_template('dates.html')
        self.response.out.write(template.render(template_values))


class EditHandler(webapp2.RequestHandler):
    def get(self):
        template_values = {
        }
        template = jinja_environment.get_template('edit.html')
        self.response.out.write(template.render(template_values))


app = webapp2.WSGIApplication([
    ('/date', DateHandler)
    ('/edit', EditHandler)
], debug=True)
```
new_contents:

```python
# request handler for editing page (/edit) goes here
# assume admin login has already been handled

import cgi

from google.appengine.api import users
import webapp2


class DateHandler(webapp2.RequestHandler):
    def get(self):
        date = self.request.get(date)
        template_values = {
        }
        template = jinja_environment.get_template('dates.html')
        self.response.out.write(template.render(template_values))


class EditHandler(webapp2.RequestHandler):
    def get(self):
        template_values = {
        }
        template = jinja_environment.get_template('edit.html')
        self.response.out.write(template.render(template_values))


app = webapp2.WSGIApplication([
    ('/date', DateHandler)
    ('/edit', EditHandler)
], debug=True)
```
subject: Put in date = self.request.get(date) in order to call the date from the HTML.
message: Put in date = self.request.get(date) in order to call the date from the HTML.
lang: Python
license: mit
repos: shickey/BearStatus,shickey/BearStatus,shickey/BearStatus
commit: eee35fdd148c825c492333d933eaaf8503bb09f2
old_file: pydrm/image.py
new_file: pydrm/image.py
old_contents:

```python
from .format import DrmFormat
from .framebuffer import DrmFramebuffer
from .buffer import DrmDumbBuffer


class DrmImageFramebuffer(DrmFramebuffer):
    def __init__(self, drm=None, format_=None, width=None, height=None, bo=None):
        if drm and format_ and width and height and bo is None:
            bo = DrmDumbBuffer(drm, format_, width, height)
        elif (drm or format_ or width or height) and bo is None:
            raise TypeError()
        super(DrmImageFramebuffer, self).__init__(bo)
        self._setup()

    def _setup(self):
        from PIL import Image
        self.bo.mmap()
        if self.format.name == 'XR24':
            # didn't find an XRGB format
            self.image = Image.new("RGBX", (self.width, self.height))
        else:
            raise ValueError("DrmImageFramebuffer does not support format '%s'" % self.format.name)

    def flush(self, x1=None, y1=None, x2=None, y2=None):
        # FIXME: Revisit and see if this can be sped up and support big endian
        # Convert RGBX -> Little Endian XRGB
        b = bytearray(self.image.tobytes())
        b[0::4], b[2::4] = b[2::4], b[0::4]
        self.bo.map[:] = str(b)
        super(DrmImageFramebuffer, self).flush(x1, y1, x2, y2)
```
new_contents:

```python
from .format import DrmFormat
from .framebuffer import DrmFramebuffer
from .buffer import DrmDumbBuffer


class DrmImageFramebuffer(DrmFramebuffer):
    def __init__(self, drm=None, format_=None, width=None, height=None, bo=None):
        if drm and format_ and width and height and bo is None:
            bo = DrmDumbBuffer(drm, format_, width, height)
        elif (drm or format_ or width or height) and bo is None:
            raise TypeError()
        super(DrmImageFramebuffer, self).__init__(bo)
        self._setup()

    def _setup(self):
        from PIL import Image
        self.bo.mmap()
        if self.format.name == 'XR24':
            # didn't find an XRGB format
            self.image = Image.new("RGBX", (self.width, self.height))
        else:
            raise ValueError("DrmImageFramebuffer does not support format '%s'" % self.format.name)

    def flush(self, x1=None, y1=None, x2=None, y2=None):
        # FIXME: Revisit and see if this can be sped up and support big endian
        # Convert RGBX -> Little Endian XRGB
        b = bytearray(self.image.tobytes())
        b[0::4], b[2::4] = b[2::4], b[0::4]
        self.bo.map[:] = bytes(b)
        super(DrmImageFramebuffer, self).flush(x1, y1, x2, y2)
```
subject: Write bytes object to mmap instead of string
message: Write bytes object to mmap instead of string This fixes compatibility with python3
lang: Python
license: mit
repos: notro/pydrm
commit: e75cba739b92a7209cee87f66d2c8c9df3f97799
old_file: bumper_kilt/scripts/run_kilt.py
new_file: bumper_kilt/scripts/run_kilt.py
old_contents:

```python
#!/usr/bin/python

import time
import serial

# configure the serial connections (the parameters differs on the
# device you are connecting to)


class Bumper(object):

    def __init__(self):
        try:
            self.ser = serial.Serial(
                port="/dev/ttyS0",
                baudrate=9600,
                parity=serial.PARITY_ODD,
                stopbits=serial.STOPBITS_TWO,
                bytesize=serial.SEVENBITS
            )
        except Exception:
            print("Bad initialisation! Check the configuration of "
                  "the serial port!")
            exit()

        self.ser.open()
        self.ser.isOpen()

    def loop(self):
        input=1
        while 1 :
            # get keyboard input
            input = raw_input(">> ")
            # Python 3 users
            # input = input(">> ")
            if input == "exit":
                self.ser.close()
                exit()
            else:
                # send the character to the device
                # (note that I happend a \r\n carriage return and line feed to
                # the characters - this is requested by my device)
                self.ser.write(input + "\r\n")
                out = ""
                # let's wait one second before reading output (let's give
                # device time to answer)
                time.sleep(1)
                while self.ser.inWaiting() > 0:
                    out += self.ser.read(1)

                if out != "":
                    print ">> " + out


def main():
    b = Bumper()
    b.loop()


if __name__ == "__main__":
    main()
```
new_contents:

```python
#!/usr/bin/python

import time
import serial

# configure the serial connections (the parameters differs on the
# device you are connecting to)


class Bumper(object):

    def __init__(self):
        try:
            self.ser = serial.Serial(
                port="/dev/ttyS0",
                baudrate=38400,
                parity=serial.PARITY_ODD,
                stopbits=serial.STOPBITS_ONE,
                bytesize=serial.EIGHTBITS
            )
        except Exception:
            print("Bad initialisation! Check the configuration of "
                  "the serial port!")
            exit()

        self.ser.open()
        self.ser.isOpen()

    def loop(self):
        input=1
        while 1 :
            # get keyboard input
            input = raw_input(">> ")
            # Python 3 users
            # input = input(">> ")
            if input == "exit":
                self.ser.close()
                exit()
            else:
                # send the character to the device
                # (note that I happend a \r\n carriage return and line feed to
                # the characters - this is requested by my device)
                self.ser.write(input + "\r\n")
                out = ""
                # let's wait one second before reading output (let's give
                # device time to answer)
                time.sleep(1)
                while self.ser.inWaiting() > 0:
                    out += self.ser.read(1)

                if out != "":
                    print ">> " + out


def main():
    b = Bumper()
    b.loop()


if __name__ == "__main__":
    main()
```
subject: Set parameters of serial class to match with kilt
message: Set parameters of serial class to match with kilt
lang: Python
license: mit
repos: ipab-rad/rad_youbot_stack,ipab-rad/rad_youbot_stack,ipab-rad/rad_youbot_stack,ipab-rad/rad_youbot_stack
commit: be03e3d6c1323e8c750afc1d4e80997f3d9d52f3
old_file: cyder/cydhcp/interface/dynamic_intr/forms.py
new_file: cyder/cydhcp/interface/dynamic_intr/forms.py
old_contents:

```python
from django import forms

from cyder.cydhcp.interface.dynamic_intr.models import (DynamicInterface,
                                                        DynamicIntrKeyValue)
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.forms import RangeWizard


class DynamicInterfaceForm(RangeWizard, UsabilityFormMixin):

    def __init__(self, *args, **kwargs):
        super(DynamicInterfaceForm, self).__init__(*args, **kwargs)
        self.fields.keyOrder = ['system', 'domain', 'mac', 'vrf',
                                'site', 'range', 'workgroup',
                                'dhcp_enabled', 'dns_enabled', 'ctnr']

    class Meta:
        model = DynamicInterface
        exclude = ('last_seen')


class DynamicIntrKeyValueForm(forms.ModelForm):
    dynamic_interface = forms.ModelChoiceField(
        queryset=DynamicInterface.objects.all(),
        widget=forms.HiddenInput())

    class Meta:
        model = DynamicIntrKeyValue
        exclude = ('is_option', 'is_statement', 'is_quoted',)
```
new_contents:

```python
from django import forms

from cyder.cydhcp.interface.dynamic_intr.models import (DynamicInterface,
                                                        DynamicIntrKeyValue)
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.forms import RangeWizard


class DynamicInterfaceForm(RangeWizard, UsabilityFormMixin):

    def __init__(self, *args, **kwargs):
        super(DynamicInterfaceForm, self).__init__(*args, **kwargs)
        self.fields.keyOrder = ['system', 'domain', 'mac', 'vrf',
                                'site', 'range', 'workgroup',
                                'dhcp_enabled', 'dns_enabled', 'ctnr']
        self.fields['range'].required = True

    class Meta:
        model = DynamicInterface
        exclude = ('last_seen')


class DynamicIntrKeyValueForm(forms.ModelForm):
    dynamic_interface = forms.ModelChoiceField(
        queryset=DynamicInterface.objects.all(),
        widget=forms.HiddenInput())

    class Meta:
        model = DynamicIntrKeyValue
        exclude = ('is_option', 'is_statement', 'is_quoted',)
```
subject: Reset range to be required in dynamic intr form
message: Reset range to be required in dynamic intr form
lang: Python
license: bsd-3-clause
repos: akeym/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,zeeman/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,drkitty/cyder,zeeman/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,OSU-Net/cyder,murrown/cyder,zeeman/cyder
commit: bda8f2a258023c14849119b6ac2856253e9c68b4
old_file: herd-code/herd-tools/herd-content-loader/herdcl/logger.py
new_file: herd-code/herd-tools/herd-content-loader/herdcl/logger.py
old_contents:

```python
"""
Copyright 2015 herd contributors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library imports
import logging, logging.handlers
import sys


def get_logger(name):
    """
    Get logger to output to file and stdout

    :param name: name of module calling method
    :return: the logger
    """
    log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s",
                                   "%Y-%m-%d %H:%M:%S")
    log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=1024 * 1024)
    log_handler.setFormatter(log_format)

    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(log_format)

    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    logger.addHandler(log_handler)
    logger.addHandler(stream_handler)
    return logger
```
""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=50 * 1024, backupCount=1) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger
subject: Manage Content Loader as Herd/DM tool - Lower log size cap
message: DM-12174: Manage Content Loader as Herd/DM tool - Lower log size cap
lang: Python
license: apache-2.0
repos: FINRAOS/herd,FINRAOS/herd,FINRAOS/herd,FINRAOS/herd,FINRAOS/herd
""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=1024 * 1024) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger DM-12174: Manage Content Loader as Herd/DM tool - Lower log size cap
""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=50 * 1024, backupCount=1) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger
<commit_before>""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=1024 * 1024) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger <commit_msg>DM-12174: Manage Content Loader as Herd/DM tool - Lower log size cap<commit_after>
""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=50 * 1024, backupCount=1) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger
""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=1024 * 1024) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger DM-12174: Manage Content Loader as Herd/DM tool - Lower log size cap""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=50 * 1024, backupCount=1) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger
<commit_before>""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=1024 * 1024) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger <commit_msg>DM-12174: Manage Content Loader as Herd/DM tool - Lower log size cap<commit_after>""" Copyright 2015 herd contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library imports import logging, logging.handlers import sys def get_logger(name): """ Get logger to output to file and stdout :param name: name of module calling method :return: the logger """ log_format = logging.Formatter("%(asctime)s - %(module)s - Line %(lineno)d - %(levelname)s \n%(message)s", "%Y-%m-%d %H:%M:%S") log_handler = logging.handlers.RotatingFileHandler('debug.log', mode='a', maxBytes=50 * 1024, backupCount=1) log_handler.setFormatter(log_format) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(log_format) logger = logging.getLogger(name) logger.setLevel(logging.INFO) logger.addHandler(log_handler) logger.addHandler(stream_handler) return logger
f67abceeae7716cd385a308b26ce447e0277518f
tests/git_wrapper_integration_tests.py
tests/git_wrapper_integration_tests.py
import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) def test_paths_external_git_folder(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external_git_folder(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage)
import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) class GitWrapperIntegrationTestExternalGitFolder(util.RepoTestCase): def test_paths_external(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage)
Move external git folder integration tests to a separate class
Move external git folder integration tests to a separate class
Python
mit
siu/git_repo
import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) def test_paths_external_git_folder(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external_git_folder(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) Move external git folder integration tests to a separate class
import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) class GitWrapperIntegrationTestExternalGitFolder(util.RepoTestCase): def test_paths_external(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage)
<commit_before>import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) def test_paths_external_git_folder(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external_git_folder(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) <commit_msg>Move external git folder integration tests to a separate class<commit_after>
import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) class GitWrapperIntegrationTestExternalGitFolder(util.RepoTestCase): def test_paths_external(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage)
import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) def test_paths_external_git_folder(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external_git_folder(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) Move external git folder integration tests to a separate classimport unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) class GitWrapperIntegrationTestExternalGitFolder(util.RepoTestCase): def test_paths_external(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage)
<commit_before>import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) def test_paths_external_git_folder(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external_git_folder(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) <commit_msg>Move external git folder integration tests to a separate class<commit_after>import unittest import util from git_wrapper import GitWrapper class GitWrapperIntegrationTest(util.RepoTestCase): def test_paths(self): self.open_tar_repo('project01') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage(self): self.open_tar_repo('project02') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage) class GitWrapperIntegrationTestExternalGitFolder(util.RepoTestCase): def test_paths_external(self): self.open_tar_repo('project03', '../project03.git') assert('test_file.txt' in self.repo.paths) assert('hello_world.rb' in self.repo.paths) def test_stage_external(self): self.open_tar_repo('project04', '../project04.git') assert('not_committed_file.txt' in self.repo.stage) assert('second_not_committed_file.txt' in self.repo.stage)
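Splitting the external-git-folder tests into their own class also lets them be selected or skipped as a unit with the standard unittest runner. The sketch below is a hypothetical follow-up, not code from the repo: RepoTestCase's internals are not shown in this record, and open_tar_repo's signature is inferred from its call sites above.

# Run only the external-folder tests with standard unittest selection:
#   python -m unittest tests.git_wrapper_integration_tests.GitWrapperIntegrationTestExternalGitFolder

import util

class ExternalFolderCase(util.RepoTestCase):
    def setUp(self):
        # Hypothetical shared fixture; chain to the parent's setUp here if
        # RepoTestCase defines one of its own.
        self.open_tar_repo('project03', '../project03.git')

    def test_paths(self):
        assert 'test_file.txt' in self.repo.paths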
feef7985133241c5e11622b0932d3eb629e7fbfe
craigschart/craigschart.py
craigschart/craigschart.py
def main(): print('Hello, World.') if __name__ == '__main__': main()
from bs4 import BeautifulSoup import requests def get_html(): r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition') print(r.status_code) print(r.text) return r.text def main(): html = get_html() soup = BeautifulSoup(html, 'lxml') print(soup.prettify()) mydivs = soup.findAll('a', {'class': 'hdrlnk'}) for t in mydivs: print(t) print('Hello, World.') if __name__ == '__main__': main()
Add soup extraction of links in results page
Add soup extraction of links in results page
Python
mit
supermitch/craigschart
def main(): print('Hello, World.') if __name__ == '__main__': main() Add soup extraction of links in results page
from bs4 import BeautifulSoup import requests def get_html(): r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition') print(r.status_code) print(r.text) return r.text def main(): html = get_html() soup = BeautifulSoup(html, 'lxml') print(soup.prettify()) mydivs = soup.findAll('a', {'class': 'hdrlnk'}) for t in mydivs: print(t) print('Hello, World.') if __name__ == '__main__': main()
<commit_before> def main(): print('Hello, World.') if __name__ == '__main__': main() <commit_msg>Add soup extraction of links in results page<commit_after>
from bs4 import BeautifulSoup import requests def get_html(): r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition') print(r.status_code) print(r.text) return r.text def main(): html = get_html() soup = BeautifulSoup(html, 'lxml') print(soup.prettify()) mydivs = soup.findAll('a', {'class': 'hdrlnk'}) for t in mydivs: print(t) print('Hello, World.') if __name__ == '__main__': main()
def main(): print('Hello, World.') if __name__ == '__main__': main() Add soup extraction of links in results pagefrom bs4 import BeautifulSoup import requests def get_html(): r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition') print(r.status_code) print(r.text) return r.text def main(): html = get_html() soup = BeautifulSoup(html, 'lxml') print(soup.prettify()) mydivs = soup.findAll('a', {'class': 'hdrlnk'}) for t in mydivs: print(t) print('Hello, World.') if __name__ == '__main__': main()
<commit_before> def main(): print('Hello, World.') if __name__ == '__main__': main() <commit_msg>Add soup extraction of links in results page<commit_after>from bs4 import BeautifulSoup import requests def get_html(): r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition') print(r.status_code) print(r.text) return r.text def main(): html = get_html() soup = BeautifulSoup(html, 'lxml') print(soup.prettify()) mydivs = soup.findAll('a', {'class': 'hdrlnk'}) for t in mydivs: print(t) print('Hello, World.') if __name__ == '__main__': main()
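The new get_html/main pair in this record stops at printing the matching hdrlnk anchor tags. A small follow-on sketch, reusing get_html from the code above; the (href, title) extraction is an assumption about the next step, not code from the repo:

from bs4 import BeautifulSoup

html = get_html()  # as defined in the commit above
soup = BeautifulSoup(html, 'lxml')

# Pull out (href, title) pairs instead of printing raw tags; 'hdrlnk' is the
# listing-link class the commit already targets.
listings = [(a.get('href'), a.get_text(strip=True))
            for a in soup.find_all('a', {'class': 'hdrlnk'})]
for href, title in listings:
    print(href, '-', title)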
8c6b4396047736d5caf00ec30b4283ee7cdc793e
lighty/wsgi/decorators.py
lighty/wsgi/decorators.py
''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) def wrapper(*args, **kwargs): try: if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError( 'Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) except Exception as e: return monads.NoneMonad(e) return wrapper
''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) @monads.handle_exception def wrapper(*args, **kwargs): if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError('Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) return wrapper
Use exception handling with decorator
Use exception handling with decorator
Python
bsd-3-clause
GrAndSE/lighty
''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) def wrapper(*args, **kwargs): try: if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError( 'Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) except Exception as e: return monads.NoneMonad(e) return wrapper Use exception handling with decorator
''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) @monads.handle_exception def wrapper(*args, **kwargs): if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError('Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) return wrapper
<commit_before>''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) def wrapper(*args, **kwargs): try: if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError( 'Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) except Exception as e: return monads.NoneMonad(e) return wrapper <commit_msg>Use exception handling with decorator<commit_after>
''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) @monads.handle_exception def wrapper(*args, **kwargs): if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError('Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) return wrapper
''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) def wrapper(*args, **kwargs): try: if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError( 'Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) except Exception as e: return monads.NoneMonad(e) return wrapper Use exception handling with decorator''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) @monads.handle_exception def wrapper(*args, **kwargs): if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError('Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) return wrapper
<commit_before>''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) def wrapper(*args, **kwargs): try: if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError( 'Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) except Exception as e: return monads.NoneMonad(e) return wrapper <commit_msg>Use exception handling with decorator<commit_after>''' ''' import functools import operator from .. import monads def view(func, **constraints): '''Functions that decorates a view. This function can also checks the argument values ''' func.is_view = True @functools.wraps(func) @monads.handle_exception def wrapper(*args, **kwargs): if not functools.reduce(operator.__and__, [constraints[arg](kwargs[arg]) for arg in constraints]): return monads.NoneMonad(ValueError('Wrong view argument value')) return monads.ValueMonad(func(*args, **kwargs)) return wrapper
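This commit replaces an inline try/except with a monads.handle_exception decorator, but the monads module itself is not part of the record. Read from the code that was removed, the decorator presumably has roughly the shape below; this is a guess at its implementation, not the lighty source, and NoneMonad is assumed importable from the surrounding package:

import functools

def handle_exception(func):
    # Convert any exception from the wrapped callable into a NoneMonad,
    # mirroring the try/except block the commit deletes.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            return NoneMonad(e)
    return wrapper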
7ef1717f34360ae48f640439fd6d6706ae755e90
functional_tests/base.py
functional_tests/base.py
from selenium import webdriver from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.PhantomJS() self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear()
from selenium.webdriver.chrome.webdriver import WebDriver from selenium.webdriver.chrome.options import Options from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): chrome_options = Options() chrome_options.add_argument("--headless") chrome_options.add_argument("--window-size=1920x1080") self.browser = WebDriver(chrome_options=chrome_options) self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear()
Use headless chrome for functional test
Use headless chrome for functional test
Python
mit
essanpupil/cashflow,essanpupil/cashflow
from selenium import webdriver from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.PhantomJS() self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear() Use headless chrome for functional test
from selenium.webdriver.chrome.webdriver import WebDriver from selenium.webdriver.chrome.options import Options from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): chrome_options = Options() chrome_options.add_argument("--headless") chrome_options.add_argument("--window-size=1920x1080") self.browser = WebDriver(chrome_options=chrome_options) self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear()
<commit_before>from selenium import webdriver from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.PhantomJS() self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear() <commit_msg>Use headless chrome for functional test<commit_after>
from selenium.webdriver.chrome.webdriver import WebDriver from selenium.webdriver.chrome.options import Options from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): chrome_options = Options() chrome_options.add_argument("--headless") chrome_options.add_argument("--window-size=1920x1080") self.browser = WebDriver(chrome_options=chrome_options) self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear()
from selenium import webdriver from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.PhantomJS() self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear() Use headless chrome for functional testfrom selenium.webdriver.chrome.webdriver import WebDriver from selenium.webdriver.chrome.options import Options from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): chrome_options = Options() chrome_options.add_argument("--headless") chrome_options.add_argument("--window-size=1920x1080") self.browser = WebDriver(chrome_options=chrome_options) self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear()
<commit_before>from selenium import webdriver from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.PhantomJS() self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear() <commit_msg>Use headless chrome for functional test<commit_after>from selenium.webdriver.chrome.webdriver import WebDriver from selenium.webdriver.chrome.options import Options from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.cache import cache class BrowserTest(StaticLiveServerTestCase): def setUp(self): chrome_options = Options() chrome_options.add_argument("--headless") chrome_options.add_argument("--window-size=1920x1080") self.browser = WebDriver(chrome_options=chrome_options) self.browser.set_window_size(1024, 768) def tearDown(self): self.browser.quit() cache.clear()
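One portability note on this record: the chrome_options= keyword used above was deprecated and later removed in Selenium 4, where the same setUp is written with options=. A sketch of the equivalent modern form; the flags are exactly the ones the commit introduces, only the keyword changes:

from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.webdriver import WebDriver

options = Options()
options.add_argument('--headless')
options.add_argument('--window-size=1920x1080')
browser = WebDriver(options=options)  # Selenium 4 spelling of the same setup
browser.set_window_size(1024, 768)
browser.quit()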
9fdea42df37c722aefb5e8fb7c04c45c06c20f17
tests/test_client_users.py
tests/test_client_users.py
import pydle from .fixtures import with_client from .mocks import Mock @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users
import pydle from .fixtures import with_client @with_client() def test_client_same_nick(server, client): assert client.is_same_nick('WiZ', 'WiZ') assert not client.is_same_nick('WiZ', 'jilles') assert not client.is_same_nick('WiZ', 'wiz') @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_invalid_creation(server, client): client._create_user('irc.fbi.gov') assert 'irc.fbi.gov' not in client.users @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_renaming_creation(server, client): client._rename_user('null', 'WiZ') assert 'WiZ' in client.users assert 'null' not in client.users @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users @with_client() def test_user_synchronization(server, client): client._create_user('WiZ') client._sync_user('WiZ', { 'hostname': 'og.irc.developer' }) assert client.users['WiZ']['hostname'] == 'og.irc.developer' @with_client() def test_user_synchronization_creation(server, client): client._sync_user('WiZ', {}) assert 'WiZ' in client.users @with_client() def test_user_invalid_synchronization(server, client): client._sync_user('irc.fbi.gov', {}) assert 'irc.fbi.gov' not in client.users
Extend client:users tests to renaming and synchronization.
tests: Extend client:users tests to renaming and synchronization.
Python
bsd-3-clause
Shizmob/pydle
import pydle from .fixtures import with_client from .mocks import Mock @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users tests: Extend client:users tests to renaming and synchronization.
import pydle from .fixtures import with_client @with_client() def test_client_same_nick(server, client): assert client.is_same_nick('WiZ', 'WiZ') assert not client.is_same_nick('WiZ', 'jilles') assert not client.is_same_nick('WiZ', 'wiz') @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_invalid_creation(server, client): client._create_user('irc.fbi.gov') assert 'irc.fbi.gov' not in client.users @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_renaming_creation(server, client): client._rename_user('null', 'WiZ') assert 'WiZ' in client.users assert 'null' not in client.users @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users @with_client() def test_user_synchronization(server, client): client._create_user('WiZ') client._sync_user('WiZ', { 'hostname': 'og.irc.developer' }) assert client.users['WiZ']['hostname'] == 'og.irc.developer' @with_client() def test_user_synchronization_creation(server, client): client._sync_user('WiZ', {}) assert 'WiZ' in client.users @with_client() def test_user_invalid_synchronization(server, client): client._sync_user('irc.fbi.gov', {}) assert 'irc.fbi.gov' not in client.users
<commit_before>import pydle from .fixtures import with_client from .mocks import Mock @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users <commit_msg>tests: Extend client:users tests to renaming and synchronization.<commit_after>
import pydle from .fixtures import with_client @with_client() def test_client_same_nick(server, client): assert client.is_same_nick('WiZ', 'WiZ') assert not client.is_same_nick('WiZ', 'jilles') assert not client.is_same_nick('WiZ', 'wiz') @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_invalid_creation(server, client): client._create_user('irc.fbi.gov') assert 'irc.fbi.gov' not in client.users @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_renaming_creation(server, client): client._rename_user('null', 'WiZ') assert 'WiZ' in client.users assert 'null' not in client.users @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users @with_client() def test_user_synchronization(server, client): client._create_user('WiZ') client._sync_user('WiZ', { 'hostname': 'og.irc.developer' }) assert client.users['WiZ']['hostname'] == 'og.irc.developer' @with_client() def test_user_synchronization_creation(server, client): client._sync_user('WiZ', {}) assert 'WiZ' in client.users @with_client() def test_user_invalid_synchronization(server, client): client._sync_user('irc.fbi.gov', {}) assert 'irc.fbi.gov' not in client.users
import pydle from .fixtures import with_client from .mocks import Mock @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users tests: Extend client:users tests to renaming and synchronization.import pydle from .fixtures import with_client @with_client() def test_client_same_nick(server, client): assert client.is_same_nick('WiZ', 'WiZ') assert not client.is_same_nick('WiZ', 'jilles') assert not client.is_same_nick('WiZ', 'wiz') @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_invalid_creation(server, client): client._create_user('irc.fbi.gov') assert 'irc.fbi.gov' not in client.users @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_renaming_creation(server, client): client._rename_user('null', 'WiZ') assert 'WiZ' in client.users assert 'null' not in client.users @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users @with_client() def test_user_synchronization(server, client): client._create_user('WiZ') client._sync_user('WiZ', { 'hostname': 'og.irc.developer' }) assert client.users['WiZ']['hostname'] == 'og.irc.developer' @with_client() def test_user_synchronization_creation(server, client): client._sync_user('WiZ', {}) assert 'WiZ' in client.users @with_client() def test_user_invalid_synchronization(server, client): client._sync_user('irc.fbi.gov', {}) assert 'irc.fbi.gov' not in client.users
<commit_before>import pydle from .fixtures import with_client from .mocks import Mock @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users <commit_msg>tests: Extend client:users tests to renaming and synchronization.<commit_after>import pydle from .fixtures import with_client @with_client() def test_client_same_nick(server, client): assert client.is_same_nick('WiZ', 'WiZ') assert not client.is_same_nick('WiZ', 'jilles') assert not client.is_same_nick('WiZ', 'wiz') @with_client() def test_user_creation(server, client): client._create_user('WiZ') assert 'WiZ' in client.users assert client.users['WiZ']['nickname'] == 'WiZ' @with_client() def test_user_invalid_creation(server, client): client._create_user('irc.fbi.gov') assert 'irc.fbi.gov' not in client.users @with_client() def test_user_renaming(server, client): client._create_user('WiZ') client._rename_user('WiZ', 'jilles') assert 'WiZ' not in client.users assert 'jilles' in client.users assert client.users['jilles']['nickname'] == 'jilles' @with_client() def test_user_renaming_creation(server, client): client._rename_user('null', 'WiZ') assert 'WiZ' in client.users assert 'null' not in client.users @with_client() def test_user_deletion(server, client): client._create_user('WiZ') client._destroy_user('WiZ') assert 'WiZ' not in client.users @with_client() def test_user_synchronization(server, client): client._create_user('WiZ') client._sync_user('WiZ', { 'hostname': 'og.irc.developer' }) assert client.users['WiZ']['hostname'] == 'og.irc.developer' @with_client() def test_user_synchronization_creation(server, client): client._sync_user('WiZ', {}) assert 'WiZ' in client.users @with_client() def test_user_invalid_synchronization(server, client): client._sync_user('irc.fbi.gov', {}) assert 'irc.fbi.gov' not in client.users
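test_user_invalid_creation and test_user_invalid_synchronization above encode an expectation that the client can tell server names from nicknames. The client internals are not included in this record; the guard below is only one plausible implementation that would make those assertions pass, relying on the fact that RFC 2812 nicknames cannot contain a dot, while server names such as 'irc.fbi.gov' do:

def _create_user(self, nickname):
    # Hypothetical guard: skip anything that cannot be a valid nickname.
    if not nickname or '.' in nickname:
        return  # probably a server name, not a user
    self.users[nickname] = {'nickname': nickname}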
b5fc673d44624dfddfbdd98c9806b7e7e2f67331
simplekv/memory/memcachestore.py
simplekv/memory/memcachestore.py
#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
Check if putting/getting was actually successful.
Check if putting/getting was actually successful.
Python
mit
fmarczin/simplekv,fmarczin/simplekv,karteek/simplekv,mbr/simplekv,karteek/simplekv,mbr/simplekv
#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.') Check if putting/getting was actually successful.
#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
<commit_before>#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.') <commit_msg>Check if putting/getting was actually successful.<commit_after>
#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.') Check if putting/getting was actually successful.#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
<commit_before>#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): self.mc.delete(key) def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): self.mc.set(key, data) return key def _put_file(self, key, file): self.mc.set(key, file.read()) return key def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.') <commit_msg>Check if putting/getting was actually successful.<commit_after>#!/usr/bin/env python # coding=utf8 try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from .. import KeyValueStore class MemcacheStore(KeyValueStore): def __contains__(self, key): try: return key in self.mc except TypeError: raise IOError('memcache implementation does not support '\ '__contains__') def __init__(self, mc): self.mc = mc def _delete(self, key): if not self.mc.delete(key): raise IOError('Error deleting key') def _get(self, key): rv = self.mc.get(key) if None == rv: raise KeyError(key) return rv def _get_file(self, key, file): file.write(self._get(key)) def _open(self, key): return StringIO(self._get(key)) def _put(self, key, data): if not self.mc.set(key, data): if len(data) >= 1024 * 1023: raise IOError('Failed to store data, probably too large. '\ 'memcached limit is 1M') raise IOError('Failed to store data') return key def _put_file(self, key, file): return self._put(key, file.read()) def keys(self): raise IOError('Memcache does not support listing keys.') def iter_keys(self): raise IOError('Memcache does not support key iteration.')
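Two facts anchor this change: python-memcache's set() and delete() return falsy values on failure rather than raising, and memcached rejects items above its size limit (1 MB by default), which is what the len(data) >= 1024 * 1023 heuristic is probing for. A minimal usage sketch against a local daemon; the host and port are assumptions:

import memcache  # the python-memcache client this wrapper is written against

mc = memcache.Client(['127.0.0.1:11211'])
store = MemcacheStore(mc)  # the class defined in the record above

key = store.put('some_key', 'some data')  # IOError if the set() call fails
print(store.get(key))                     # KeyError if the key is missing
store.delete(key)                         # IOError if delete() reports failure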
67710f78677c4ad2737504d9815154c8e9f3bb18
django_prometheus/utils.py
django_prometheus/utils.py
import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count)] + [_INF]
import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count+lower)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count+lower)] + [_INF]
Fix PowersOf to return `count` items, not "numbers up to base**count".
Fix PowersOf to return `count` items, not "numbers up to base**count".
Python
apache-2.0
obytes/django-prometheus,wangwanzhong/django-prometheus,wangwanzhong/django-prometheus,korfuri/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus,DingaGa/django-prometheus,DingaGa/django-prometheus
import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count)] + [_INF] Fix PowersOf to return `count` items, not "numbers up to base**count".
import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count+lower)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count+lower)] + [_INF]
<commit_before>import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count)] + [_INF] <commit_msg>Fix PowersOf to return `count` items, not "numbers up to base**count".<commit_after>
import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count+lower)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count+lower)] + [_INF]
import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count)] + [_INF] Fix PowersOf to return `count` items, not "numbers up to base**count".import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count+lower)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count+lower)] + [_INF]
<commit_before>import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count)] + [_INF] <commit_msg>Fix PowersOf to return `count` items, not "numbers up to base**count".<commit_after>import time from prometheus_client import _INF # TODO(korfuri): if python>3.3, use perf_counter() or monotonic(). def Time(): """Returns some representation of the current time. This wrapper is meant to take advantage of a higher time resolution when available. Thus, its return value should be treated as an opaque object. It can be compared to the current time with TimeSince(). """ return time.time() def TimeSince(t): """Compares a value returned by Time() to the current time. Returns: the time since t, in fractional seconds. """ return time.time() - t def PowersOf(logbase, count, lower=0, include_zero=True): """Returns a list of count powers of logbase (from logbase**lower).""" if not include_zero: return [logbase ** i for i in range(lower, count+lower)] + [_INF] else: return [0] + [logbase ** i for i in range(lower, count+lower)] + [_INF]
ef576a884bd4155fbc8bec8615c25544e74b98c3
samsungctl/__init__.py
samsungctl/__init__.py
"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0-git" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT"
"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0+1" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT"
Use PEP 440 compatible local version identifier
Use PEP 440 compatible local version identifier
Python
mit
Ape/samsungctl
"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0-git" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT" Use PEP 440 compatible local version identifier
"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0+1" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT"
<commit_before>"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0-git" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT" <commit_msg>Use PEP 440 compatible local version identifier<commit_after>
"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0+1" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT"
"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0-git" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT" Use PEP 440 compatible local version identifier"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0+1" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT"
<commit_before>"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0-git" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT" <commit_msg>Use PEP 440 compatible local version identifier<commit_after>"""Remote control Samsung televisions via TCP/IP connection""" from .remote import Remote __title__ = "samsungctl" __version__ = "0.6.0+1" __url__ = "https://github.com/Ape/samsungctl" __author__ = "Lauri Niskanen" __author_email__ = "ape@ape3000.com" __license__ = "MIT"
c037b0a9ef8424737b978dda0400a1c31d5cb300
app/state.py
app/state.py
import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self.process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self.process = multiprocessing.Process(target=program.run, args=(params,)) self.process.start() def stop_program(self): if self.process is not None: self.process.terminate() unicornhat.show() state = State()
import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() state = State()
Mark process member variable for internal use
Mark process member variable for internal use
Python
mit
njbbaer/unicorn-remote,njbbaer/unicorn-remote,njbbaer/unicorn-remote
import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self.process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self.process = multiprocessing.Process(target=program.run, args=(params,)) self.process.start() def stop_program(self): if self.process is not None: self.process.terminate() unicornhat.show() state = State()Mark process member variable for internal use
import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() state = State()
<commit_before>import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self.process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self.process = multiprocessing.Process(target=program.run, args=(params,)) self.process.start() def stop_program(self): if self.process is not None: self.process.terminate() unicornhat.show() state = State()<commit_msg>Mark process member variable for internal use<commit_after>
import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() state = State()
import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self.process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self.process = multiprocessing.Process(target=program.run, args=(params,)) self.process.start() def stop_program(self): if self.process is not None: self.process.terminate() unicornhat.show() state = State()Mark process member variable for internal useimport multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() state = State()
<commit_before>import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self.process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self.process = multiprocessing.Process(target=program.run, args=(params,)) self.process.start() def stop_program(self): if self.process is not None: self.process.terminate() unicornhat.show() state = State()<commit_msg>Mark process member variable for internal use<commit_after>import multiprocessing import unicornhat import importlib import sys import os class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): self.stop_program() if "brightness" in params: unicornhat.brightness(float(params["brightness"])) if "rotation" in params: unicornhat.rotation(int(params["rotation"])) program = importlib.import_module("app.programs." + name) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() state = State()
f4c99f4a1b3e49e0768af1b4b6444ee33bef49ac
microauth/urls.py
microauth/urls.py
"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), ] urlpatterns += staticfiles_urlpatterns()
"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), url(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}), ] urlpatterns += staticfiles_urlpatterns()
Add a missing route leading to the login page.
Add a missing route leading to the login page.
Python
mit
microserv/microauth,microserv/microauth,microserv/microauth
"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), ] urlpatterns += staticfiles_urlpatterns() Add a missing route leading to the login page.
"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), url(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}), ] urlpatterns += staticfiles_urlpatterns()
<commit_before>"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), ] urlpatterns += staticfiles_urlpatterns() <commit_msg>Add a missing route leading to the login page.<commit_after>
"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), url(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}), ] urlpatterns += staticfiles_urlpatterns()
"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), ] urlpatterns += staticfiles_urlpatterns() Add a missing route leading to the login page."""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), url(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}), ] urlpatterns += staticfiles_urlpatterns()
<commit_before>"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), ] urlpatterns += staticfiles_urlpatterns() <commit_msg>Add a missing route leading to the login page.<commit_after>"""microauth URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns = [ url(r'^admin/', admin.site.urls), # django-oauth-toolkit url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')), url(r'^api/', include('apps.api.urls', namespace='microauth_api')), url(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}), ] urlpatterns += staticfiles_urlpatterns()
49f332149ae8a9a3b5faf82bc20b46dfaeb0a3ad
indra/sources/ctd/api.py
indra/sources/ctd/api.py
import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset): if subset not in urls: raise ValueError('%s is not a valid CTD subset.') df = pandas.read_csv(urls[subset], sep='\t', comment='#', header=None) return process_dataframe(df) def process_tsv(fname, subset): df = pandas.read_csv(fname, sep='\t', comment='#', header=None) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.') cp = processors[subset](df) cp.extract_statements() return cp
import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset, url=None): if subset not in urls: raise ValueError('%s is not a valid CTD subset.' % subset) url = url if url else urls[subset] return _process_url_or_file(url, subset) def process_tsv(fname, subset): return _process_url_or_file(fname, subset) def _process_url_or_file(path, subset): df = pandas.read_csv(path, sep='\t', comment='#', header=None, dtype=str, keep_default_na=False) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.' % subset) cp = processors[subset](df) cp.extract_statements() return cp
Refactor API to have single pandas load
Refactor API to have single pandas load
Python
bsd-2-clause
sorgerlab/indra,bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra
import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset): if subset not in urls: raise ValueError('%s is not a valid CTD subset.') df = pandas.read_csv(urls[subset], sep='\t', comment='#', header=None) return process_dataframe(df) def process_tsv(fname, subset): df = pandas.read_csv(fname, sep='\t', comment='#', header=None) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.') cp = processors[subset](df) cp.extract_statements() return cp Refactor API to have single pandas load
import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset, url=None): if subset not in urls: raise ValueError('%s is not a valid CTD subset.' % subset) url = url if url else urls[subset] return _process_url_or_file(url, subset) def process_tsv(fname, subset): return _process_url_or_file(fname, subset) def _process_url_or_file(path, subset): df = pandas.read_csv(path, sep='\t', comment='#', header=None, dtype=str, keep_default_na=False) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.' % subset) cp = processors[subset](df) cp.extract_statements() return cp
<commit_before>import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset): if subset not in urls: raise ValueError('%s is not a valid CTD subset.') df = pandas.read_csv(urls[subset], sep='\t', comment='#', header=None) return process_dataframe(df) def process_tsv(fname, subset): df = pandas.read_csv(fname, sep='\t', comment='#', header=None) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.') cp = processors[subset](df) cp.extract_statements() return cp <commit_msg>Refactor API to have single pandas load<commit_after>
import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset, url=None): if subset not in urls: raise ValueError('%s is not a valid CTD subset.' % subset) url = url if url else urls[subset] return _process_url_or_file(url, subset) def process_tsv(fname, subset): return _process_url_or_file(fname, subset) def _process_url_or_file(path, subset): df = pandas.read_csv(path, sep='\t', comment='#', header=None, dtype=str, keep_default_na=False) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.' % subset) cp = processors[subset](df) cp.extract_statements() return cp
import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset): if subset not in urls: raise ValueError('%s is not a valid CTD subset.') df = pandas.read_csv(urls[subset], sep='\t', comment='#', header=None) return process_dataframe(df) def process_tsv(fname, subset): df = pandas.read_csv(fname, sep='\t', comment='#', header=None) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.') cp = processors[subset](df) cp.extract_statements() return cp Refactor API to have single pandas loadimport pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset, url=None): if subset not in urls: raise ValueError('%s is not a valid CTD subset.' % subset) url = url if url else urls[subset] return _process_url_or_file(url, subset) def process_tsv(fname, subset): return _process_url_or_file(fname, subset) def _process_url_or_file(path, subset): df = pandas.read_csv(path, sep='\t', comment='#', header=None, dtype=str, keep_default_na=False) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.' % subset) cp = processors[subset](df) cp.extract_statements() return cp
<commit_before>import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset): if subset not in urls: raise ValueError('%s is not a valid CTD subset.') df = pandas.read_csv(urls[subset], sep='\t', comment='#', header=None) return process_dataframe(df) def process_tsv(fname, subset): df = pandas.read_csv(fname, sep='\t', comment='#', header=None) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.') cp = processors[subset](df) cp.extract_statements() return cp <commit_msg>Refactor API to have single pandas load<commit_after>import pandas from .processor import CTDChemicalDiseaseProcessor, \ CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor base_url = 'http://ctdbase.org/reports/' urls = { 'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz', 'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz', 'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz', } processors = { 'chemical_gene': CTDChemicalGeneProcessor, 'chemical_disease': CTDChemicalDiseaseProcessor, 'gene_disease': CTDGeneDiseaseProcessor, } def process_from_web(subset, url=None): if subset not in urls: raise ValueError('%s is not a valid CTD subset.' % subset) url = url if url else urls[subset] return _process_url_or_file(url, subset) def process_tsv(fname, subset): return _process_url_or_file(fname, subset) def _process_url_or_file(path, subset): df = pandas.read_csv(path, sep='\t', comment='#', header=None, dtype=str, keep_default_na=False) return process_dataframe(df, subset) def process_dataframe(df, subset): if subset not in processors: raise ValueError('%s is not a valid CTD subset.' % subset) cp = processors[subset](df) cp.extract_statements() return cp
d7e0fd7027f4a4a5024d136c0ab96b244d761c99
pucas/__init__.py
pucas/__init__.py
default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 5, 1, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 6, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
Set develop version back to 0.6-dev
Set develop version back to 0.6-dev
Python
apache-2.0
Princeton-CDH/django-pucas,Princeton-CDH/django-pucas
default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 5, 1, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) Set develop version back to 0.6-dev
default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 6, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
<commit_before>default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 5, 1, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) <commit_msg>Set develop version back to 0.6-dev<commit_after>
default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 6, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 5, 1, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) Set develop version back to 0.6-devdefault_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 6, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
<commit_before>default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 5, 1, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) <commit_msg>Set develop version back to 0.6-dev<commit_after>default_app_config = 'pucas.apps.PucasConfig' __version_info__ = (0, 6, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
71cdbeada7e11634e1168ca2e825167cbe87b4de
spacy/lang/de/norm_exceptions.py
spacy/lang/de/norm_exceptions.py
# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string] = norm NORM_EXCEPTIONS[string.title()] = norm
# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string.title()] = norm
Revert "Also include lowercase norm exceptions"
Revert "Also include lowercase norm exceptions" This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.
Python
mit
aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy
# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string] = norm NORM_EXCEPTIONS[string.title()] = norm Revert "Also include lowercase norm exceptions" This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.
# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string.title()] = norm
<commit_before># coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string] = norm NORM_EXCEPTIONS[string.title()] = norm <commit_msg>Revert "Also include lowercase norm exceptions" This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.<commit_after>
# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string.title()] = norm
# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string] = norm NORM_EXCEPTIONS[string.title()] = norm Revert "Also include lowercase norm exceptions" This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.# coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string.title()] = norm
<commit_before># coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string] = norm NORM_EXCEPTIONS[string.title()] = norm <commit_msg>Revert "Also include lowercase norm exceptions" This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.<commit_after># coding: utf8 from __future__ import unicode_literals # Here we only want to include the absolute most common words. Otherwise, # this list would get impossibly long for German – especially considering the # old vs. new spelling rules, and all possible cases. _exc = { "daß": "dass" } NORM_EXCEPTIONS = {} for string, norm in _exc.items(): NORM_EXCEPTIONS[string.title()] = norm
a3689129e2938dd7f2213ea11ac36a854cc0a31e
test/test_jarvis.py
test/test_jarvis.py
import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_log(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main()
import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_file_to_json(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main()
Change test method name to better match
Change test method name to better match
Python
apache-2.0
clb6/jarvis-cli
import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_log(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main() Change test method name to better match
import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_file_to_json(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main()
<commit_before>import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_log(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main() <commit_msg>Change test method name to better match<commit_after>
import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_file_to_json(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main()
import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_log(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main() Change test method name to better matchimport unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_file_to_json(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main()
<commit_before>import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_log(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main() <commit_msg>Change test method name to better match<commit_after>import unittest from collections import namedtuple # TODO: Move this code to a module so we don't depend on PYTHONPATH and that sort # of ugliness. from jarvis import convert_file_to_json, get_tags JarvisSettings = namedtuple('JarvisSettings', ['tags_directory']) class TestJarvis(unittest.TestCase): def setUp(self): with open('fixtures/test_log.md', 'r') as f: self.test_log = f.read() self.js = JarvisSettings('fixtures/tags') def test_convert_file_to_json(self): try: j = convert_file_to_json(self.test_log) # Version 0.2.0 expected_keys = sorted(['version', 'created', 'body', 'tags', 'occurred', 'author']) actual_keys = sorted(j.keys()) self.assertListEqual(expected_keys, actual_keys) except Exception as e: self.fail("Unexpected error while parsing test_log") def test_get_tags(self): expected_tags = ['TestA', 'TestB&C'] actual_tags = get_tags(self.js) self.assertListEqual(expected_tags, actual_tags) if __name__ == "__main__": """ To run: export PYTHONPATH="$HOME/oz/workspace/Jarvis/bin" python -m unittest test_jarvis.py """ unittest.main()
346f726a12078e7667ef32808feda0618f568839
lino_extjs6/projects/mysite/settings/demo.py
lino_extjs6/projects/mysite/settings/demo.py
import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 03, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True
import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 3, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True
Add support to Py3 for datetime
Add support to Py3 for datetime
Python
agpl-3.0
lino-framework/extjs6,lsaffre/lino_extjs6,lsaffre/lino_extjs6,lsaffre/lino_extjs6,lsaffre/lino_extjs6,lino-framework/extjs6,lsaffre/lino_extjs6,lino-framework/extjs6
import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 03, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True Add support to Py3 for datetime
import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 3, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True
<commit_before>import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 03, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True <commit_msg>Add support to Py3 for datetime<commit_after>
import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 3, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True
import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 03, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True Add support to Py3 for datetimeimport datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 3, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True
<commit_before>import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 03, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True <commit_msg>Add support to Py3 for datetime<commit_after>import datetime from lino_extjs6.projects.mysite.settings import * class Site(Site): project_name = 'extjs_demo' is_demo_site = True # ignore_dates_after = datetime.date(2019, 05, 22) the_demo_date = datetime.date(2015, 3, 12) SITE = Site(globals()) SECRET_KEY = "1234" # ALLOWED_HOSTS = ['*'] DEBUG = True
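The one-character fix in the record above works because Python 3 removed leading-zero integer literals: in Python 2, 03 was an octal literal equal to 3, while Python 3 only accepts the explicit 0o3 form and refuses to parse 03 at all, so the old module failed at import time. A standalone illustration, not part of the commit:

import datetime

# Python 2: 03 == 3 (octal notation), so datetime.date(2015, 03, 12) parsed.
# Python 3: a leading zero on a non-zero integer literal is a SyntaxError
# (the exact message varies by version), so the module would fail to import
# before any code ran. Dropping the zero is valid on both interpreter lines:
assert datetime.date(2015, 3, 12) == datetime.date(year=2015, month=3, day=12)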
f5c74e6869b54bf6d16bb8493d3c76e9fb65bec5
createdb.py
createdb.py
#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) session.commit()
#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) context4 = fmn.lib.models.Context.create( session, name="desktop", description="fedmsg-notify", detail_name="None", icon="console", placeholder="There's no need to put a value here" ) session.commit()
Add the desktop context to the setup script.
Add the desktop context to the setup script.
Python
lgpl-2.1
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) session.commit() Add the desktop context to the setup script.
#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) context4 = fmn.lib.models.Context.create( session, name="desktop", description="fedmsg-notify", detail_name="None", icon="console", placeholder="There's no need to put a value here" ) session.commit()
<commit_before>#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) session.commit() <commit_msg>Add the desktop context to the setup script.<commit_after>
#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) context4 = fmn.lib.models.Context.create( session, name="desktop", description="fedmsg-notify", detail_name="None", icon="console", placeholder="There's no need to put a value here" ) session.commit()
#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) session.commit() Add the desktop context to the setup script.#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) context4 = fmn.lib.models.Context.create( session, name="desktop", description="fedmsg-notify", detail_name="None", icon="console", placeholder="There's no need to put a value here" ) session.commit()
<commit_before>#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) session.commit() <commit_msg>Add the desktop context to the setup script.<commit_after>#!/usr/bin/env python import sys import fedmsg.config import fmn.lib.models config = fedmsg.config.load_config() uri = config.get('fmn.sqlalchemy.uri') if not uri: raise ValueError("fmn.sqlalchemy.uri must be present") if '-h' in sys.argv or '--help'in sys.argv: print "createdb.py [--with-dev-data]" sys.exit(0) session = fmn.lib.models.init(uri, debug=True, create=True) if '--with-dev-data' in sys.argv: context1 = fmn.lib.models.Context.create( session, name="irc", description="Internet Relay Chat", detail_name="irc nick", icon="user", placeholder="z3r0_c00l", ) context2 = fmn.lib.models.Context.create( session, name="email", description="Electronic Mail", detail_name="email address", icon="envelope", placeholder="jane@fedoraproject.org", ) context3 = fmn.lib.models.Context.create( session, name="android", description="Google Cloud Messaging", detail_name="registration id", icon="phone", placeholder="laksdjfasdlfkj183097falkfj109f" ) context4 = fmn.lib.models.Context.create( session, name="desktop", description="fedmsg-notify", detail_name="None", icon="console", placeholder="There's no need to put a value here" ) session.commit()
e8795b5f45ea67c83f7e99c54b04e0dceb1fee34
run-cron.py
run-cron.py
#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete")
#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) os.chdir(path) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete")
Change to the root directory before executing (stops Mumble ICE file errors)
Change to the root directory before executing (stops Mumble ICE file errors) This was causing the SSO job to fail, thanks for psu3d0 for pointing it out.
Python
bsd-3-clause
nikdoof/test-auth
#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete") Change to the root directory before executing (stops Mumble ICE file errors) This was causing the SSO job to fail, thanks for psu3d0 for pointing it out.
#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) os.chdir(path) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete")
<commit_before>#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete") <commit_msg>Change to the root directory before executing (stops Mumble ICE file errors) This was causing the SSO job to fail, thanks for psu3d0 for pointing it out.<commit_after>
#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) os.chdir(path) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete")
#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete") Change to the root directory before executing (stops Mumble ICE file errors) This was causing the SSO job to fail, thanks for psu3d0 for pointing it out.#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) os.chdir(path) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete")
<commit_before>#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete") <commit_msg>Change to the root directory before executing (stops Mumble ICE file errors) This was causing the SSO job to fail, thanks for psu3d0 for pointing it out.<commit_after>#!/usr/bin/env python """Executes a Django cronjob""" import os # Activate the virtualenv path = os.path.dirname(os.path.abspath( __file__ )) os.chdir(path) activate_this = os.path.join(path, 'env/bin/activate_this.py') execfile(activate_this, dict(__file__=activate_this)) import sys import logging from django.core.management import setup_environ import settings setup_environ(settings) logging.basicConfig(level=logging.DEBUG) log = logging.getLogger('runcron') try: mod = __import__(sys.argv[1]) except ImportError: raise Exception('Error creating service') for i in sys.argv[1].split(".")[1:]: mod = getattr(mod, i) cron_class = getattr(mod, sys.argv[2])() log.info("Starting Job %s in %s" % (sys.argv[2], sys.argv[1])) #try: cron_class.job() #except: # log.error("Error executing job, aborting.") log.info("Job complete")
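A short sketch of why the added os.chdir(path) in the record above matters: cron starts jobs in an arbitrary working directory (commonly the invoking user's home), and any relative path the job later opens -- such as Mumble's .ice interface file -- is resolved against that directory rather than against the script's location. The file name below is hypothetical:

import os

# Resolve the directory containing this script, then make it the working
# directory so that relative paths used later resolve against the project
# root rather than wherever cron happened to launch the process.
project_root = os.path.dirname(os.path.abspath(__file__))
os.chdir(project_root)

# A relative reference like this (hypothetical name) now points inside the
# project instead of the cron user's home directory:
ice_file = os.path.abspath('Murmur.ice')
print(ice_file)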
cd944a2606159c8ea11ffe8075ce4ec186fd799c
tests/basic_test.py
tests/basic_test.py
import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None")
import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
Update tests to use class-based interface
Update tests to use class-based interface
Python
mit
AlterCodex/nxppy,Schoberm/nxppy,AlterCodex/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,AlterCodex/nxppy
import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") Update tests to use class-based interface
import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
<commit_before>import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") <commit_msg>Update tests to use class-based interface<commit_after>
import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") Update tests to use class-based interfaceimport unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
<commit_before>import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") <commit_msg>Update tests to use class-based interface<commit_after>import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
eb03de241f3d47173381ee22f85b5cdf5d9c1fb4
examples/monitoring/worker.py
examples/monitoring/worker.py
import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception as e: worker.logger.error('Cannot connect to graphite')
import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception: worker.logger.error('Cannot connect to graphite')
Fix flake8 issues in examples
Fix flake8 issues in examples
Python
apache-2.0
aioworkers/aioworkers
import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception as e: worker.logger.error('Cannot connect to graphite') Fix flake8 issues in examples
import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception: worker.logger.error('Cannot connect to graphite')
<commit_before>import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception as e: worker.logger.error('Cannot connect to graphite') <commit_msg>Fix flake8 issues in examples<commit_after>
import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception: worker.logger.error('Cannot connect to graphite')
import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception as e: worker.logger.error('Cannot connect to graphite') Fix flake8 issues in examplesimport random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception: worker.logger.error('Cannot connect to graphite')
<commit_before>import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception as e: worker.logger.error('Cannot connect to graphite') <commit_msg>Fix flake8 issues in examples<commit_after>import random import time from os import getenv from aiographite.aiographite import connect from aiographite.protocol import PlaintextProtocol GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost') async def run(worker, *args, **kwargs): value = random.randrange(10) try: connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop) await connection.send('workers.worker', value, time.time()) await connection.close() except Exception: worker.logger.error('Cannot connect to graphite')
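The lint error being silenced in the record above is pyflakes' "local variable is assigned to but never used" check, reported through flake8 as F841: binding the exception to a name and never reading it trips the warning, while a bare except Exception: does not. A self-contained before/after sketch (connect and log are stand-ins, not the project's names):

import logging

logging.basicConfig()
log = logging.getLogger('example')

def connect():
    raise OSError('graphite unreachable')  # stand-in for the real failure

try:
    connect()
except Exception as e:  # flake8/pyflakes F841: 'e' is assigned but never used
    log.error('Cannot connect to graphite')

try:
    connect()
except Exception:       # no unused binding, so the checker is satisfied
    log.error('Cannot connect to graphite')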
d799b3fdf6365fb54f0a1553b9a68c4334dd5482
examples/project_one/input.py
examples/project_one/input.py
def read_file(filename): # Open the file file_obj = open(filename) # Iterate over lines in the file for line in file_obj: # Split line by spaces (creates a list) # Alternatives: split(',') numbers = line.split() if len(numbers) != 2: # Convert strings to numbers numbers2 = [] for number in numbers: # Convert number to float number = float(number) # Append to temperary list numbers2.append(number) # Replace numbers by numbers2 numbers = numbers2 else: # We're processing a header print 'Skipping header line' return contents # Just for debugging purposes read_file('data.txt')
Convert list, for later reference.
Convert list, for later reference.
Python
mit
dokterbob/slf-programming-workshops,dokterbob/slf-programming-workshops,dokterbob/slf-programming-workshops,dokterbob/slf-programming-workshops
Convert list, for later reference.
def read_file(filename): # Open the file file_obj = open(filename) # Iterate over lines in the file for line in file_obj: # Split line by spaces (creates a list) # Alternatives: split(',') numbers = line.split() if len(numbers) != 2: # Convert strings to numbers numbers2 = [] for number in numbers: # Convert number to float number = float(number) # Append to temperary list numbers2.append(number) # Replace numbers by numbers2 numbers = numbers2 else: # We're processing a header print 'Skipping header line' return contents # Just for debugging purposes read_file('data.txt')
<commit_before><commit_msg>Convert list, for later reference.<commit_after>
def read_file(filename): # Open the file file_obj = open(filename) # Iterate over lines in the file for line in file_obj: # Split line by spaces (creates a list) # Alternatives: split(',') numbers = line.split() if len(numbers) != 2: # Convert strings to numbers numbers2 = [] for number in numbers: # Convert number to float number = float(number) # Append to temperary list numbers2.append(number) # Replace numbers by numbers2 numbers = numbers2 else: # We're processing a header print 'Skipping header line' return contents # Just for debugging purposes read_file('data.txt')
Convert list, for later reference.def read_file(filename): # Open the file file_obj = open(filename) # Iterate over lines in the file for line in file_obj: # Split line by spaces (creates a list) # Alternatives: split(',') numbers = line.split() if len(numbers) != 2: # Convert strings to numbers numbers2 = [] for number in numbers: # Convert number to float number = float(number) # Append to temperary list numbers2.append(number) # Replace numbers by numbers2 numbers = numbers2 else: # We're processing a header print 'Skipping header line' return contents # Just for debugging purposes read_file('data.txt')
<commit_before><commit_msg>Convert list, for later reference.<commit_after>def read_file(filename): # Open the file file_obj = open(filename) # Iterate over lines in the file for line in file_obj: # Split line by spaces (creates a list) # Alternatives: split(',') numbers = line.split() if len(numbers) != 2: # Convert strings to numbers numbers2 = [] for number in numbers: # Convert number to float number = float(number) # Append to temperary list numbers2.append(number) # Replace numbers by numbers2 numbers = numbers2 else: # We're processing a header print 'Skipping header line' return contents # Just for debugging purposes read_file('data.txt')
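As committed, read_file in the record above has problems it would hit on first run: the final return contents references a name that is never assigned (a NameError), print '...' is Python 2-only syntax, the file handle is never closed, and the len(numbers) != 2 test routes two-field lines to the header branch even though the surrounding comments read as if those were the data rows. A corrected sketch of the apparent intent -- the two-column assumption is inferred from the comments, since data.txt itself is not shown:

def read_file(filename):
    """Read a whitespace-separated two-column file, skipping header lines."""
    contents = []
    with open(filename) as file_obj:          # closes the file when done
        for line in file_obj:
            numbers = line.split()
            if len(numbers) == 2:             # a data row: two numeric fields
                contents.append([float(number) for number in numbers])
            else:                             # anything else is a header
                print('Skipping header line')
    return contents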
8396ac44d434a06c410c516b6109ec6ace030601
examples/pyuv_cffi_example.py
examples/pyuv_cffi_example.py
"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # stop and free any timers before freeing the loop print('loop.run() -> ', status) def main(): run() if __name__ == '__main__': main()
"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # we must stop and free any other handles before freeing the loop print('loop.run() -> ', status) # all handles in pyuv_cffi (including the loop) are automatically freed when they go out of # scope def main(): run() if __name__ == '__main__': main()
Add inline comment regarding freeing resources
Add inline comment regarding freeing resources
Python
mit
veegee/guv,veegee/guv
"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # stop and free any timers before freeing the loop print('loop.run() -> ', status) def main(): run() if __name__ == '__main__': main() Add inline comment regarding freeing resources
"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # we must stop and free any other handles before freeing the loop print('loop.run() -> ', status) # all handles in pyuv_cffi (including the loop) are automatically freed when they go out of # scope def main(): run() if __name__ == '__main__': main()
<commit_before>"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # stop and free any timers before freeing the loop print('loop.run() -> ', status) def main(): run() if __name__ == '__main__': main() <commit_msg>Add inline comment regarding freeing resources<commit_after>
"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # we must stop and free any other handles before freeing the loop print('loop.run() -> ', status) # all handles in pyuv_cffi (including the loop) are automatically freed when they go out of # scope def main(): run() if __name__ == '__main__': main()
"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # stop and free any timers before freeing the loop print('loop.run() -> ', status) def main(): run() if __name__ == '__main__': main() Add inline comment regarding freeing resources"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # we must stop and free any other handles before freeing the loop print('loop.run() -> ', status) # all handles in pyuv_cffi (including the loop) are automatically freed when they go out of # scope def main(): run() if __name__ == '__main__': main()
<commit_before>"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # stop and free any timers before freeing the loop print('loop.run() -> ', status) def main(): run() if __name__ == '__main__': main() <commit_msg>Add inline comment regarding freeing resources<commit_after>"""A simple example demonstrating basic usage of pyuv_cffi This example creates a timer handle and a signal handle, then starts the loop. The timer callback is run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener for the INT signal and allows us to exit the loop by pressing ctrl-c. """ import signal from pyuv_cffi import Loop, Timer, Signal def sig_cb(sig_h, sig_num): print('\nsig_cb({}, {})'.format(sig_h, sig_num)) sig_h.stop() sig_h.loop.stop() def timer_cb(timer_h): print('timer_cb({})'.format(timer_h)) def run(): loop = Loop() timer_h = Timer(loop) timer_h.start(timer_cb, 1, 1) sig_h = Signal(loop) sig_h.start(sig_cb, signal.SIGINT) status = loop.run() timer_h.close() # we must stop and free any other handles before freeing the loop print('loop.run() -> ', status) # all handles in pyuv_cffi (including the loop) are automatically freed when they go out of # scope def main(): run() if __name__ == '__main__': main()
c28ae7e4b0637a2c4db120d9add13d5589ddca40
runtests.py
runtests.py
#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings try: django.setup() except AttributeError: # 1.6 or lower pass TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests()
#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests()
Remove compat shim as it doesn't apply
Remove compat shim as it doesn't apply
Python
mit
sergei-maertens/django-systemjs,sergei-maertens/django-systemjs,sergei-maertens/django-systemjs,sergei-maertens/django-systemjs
#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings try: django.setup() except AttributeError: # 1.6 or lower pass TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests() Remove compat shim as it doesn't apply
#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests()
<commit_before>#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings try: django.setup() except AttributeError: # 1.6 or lower pass TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests() <commit_msg>Remove compat shim as it doesn't apply<commit_after>
#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests()
#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings try: django.setup() except AttributeError: # 1.6 or lower pass TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests() Remove compat shim as it doesn't apply#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests()
<commit_before>#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings try: django.setup() except AttributeError: # 1.6 or lower pass TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests() <commit_msg>Remove compat shim as it doesn't apply<commit_after>#!/usr/bin/env python import os import sys def runtests(): test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' import django from django.test.utils import get_runner from django.conf import settings django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True) failures = test_runner.run_tests(['.']) sys.exit(failures) if __name__ == '__main__': runtests()
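The shim removed in the record above existed because django.setup() only appeared in Django 1.7; on 1.6 the attribute is missing, hence the AttributeError guard. Once 1.6 support is dropped, the unconditional call is correct and the guard is dead code. For reference, a feature-detection form of the old behavior (a sketch, not project code):

import django

# Django < 1.7 has no setup(); populate the app registry only when present.
if hasattr(django, 'setup'):
    django.setup()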
0dddfcbdb46ac91ddc0bfed4482bce049a8593c2
lazyblacksmith/views/blueprint.py
lazyblacksmith/views/blueprint.py
# -*- encoding: utf-8 -*- from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter_by(wh=False) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html')
# -*- encoding: utf-8 -*- import config from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter( Region.id.in_(config.CREST_REGION_PRICE) ).filter_by( wh=False ) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html')
Change region list to match config
Change region list to match config
Python
bsd-3-clause
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
# -*- encoding: utf-8 -*- from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter_by(wh=False) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html') Change region list to match config
# -*- encoding: utf-8 -*- import config from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter( Region.id.in_(config.CREST_REGION_PRICE) ).filter_by( wh=False ) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html')
<commit_before># -*- encoding: utf-8 -*- from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter_by(wh=False) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html') <commit_msg>Change region list to match config<commit_after>
# -*- encoding: utf-8 -*- import config from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter( Region.id.in_(config.CREST_REGION_PRICE) ).filter_by( wh=False ) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html')
# -*- encoding: utf-8 -*- from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter_by(wh=False) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html') Change region list to match config# -*- encoding: utf-8 -*- import config from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter( Region.id.in_(config.CREST_REGION_PRICE) ).filter_by( wh=False ) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html')
<commit_before># -*- encoding: utf-8 -*- from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter_by(wh=False) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html') <commit_msg>Change region list to match config<commit_after># -*- encoding: utf-8 -*- import config from flask import Blueprint from flask import render_template from lazyblacksmith.models import Activity from lazyblacksmith.models import Item from lazyblacksmith.models import Region blueprint = Blueprint('blueprint', __name__) @blueprint.route('/manufacturing/<int:item_id>') def manufacturing(item_id): """ Display the manufacturing page with all data """ item = Item.query.get(item_id) activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING) product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one() regions = Region.query.filter( Region.id.in_(config.CREST_REGION_PRICE) ).filter_by( wh=False ) # is any of the materials manufactured ? has_manufactured_components = False for material in materials: if material.material.is_manufactured(): has_manufactured_components = True break return render_template('blueprint/manufacturing.html', **{ 'blueprint': item, 'materials': materials, 'activity': activity, 'product': product, 'regions': regions, 'has_manufactured_components': has_manufactured_components, }) @blueprint.route('/') def search(): return render_template('blueprint/search.html')
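The change in the record above replaces a bare filter_by(wh=False) with a chained filter(Region.id.in_(config.CREST_REGION_PRICE)). A self-contained SQLAlchemy sketch of that chaining pattern, assuming SQLAlchemy 1.4+ and a toy Region model on in-memory SQLite; the model, the IDs, and the ALLOWED_IDS stand-in for the config list are all illustrative, not from the record:

from sqlalchemy import Column, Integer, Boolean, create_engine
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Region(Base):
    __tablename__ = 'region'
    id = Column(Integer, primary_key=True)
    wh = Column(Boolean, default=False)

engine = create_engine('sqlite://')          # in-memory database
Base.metadata.create_all(engine)

ALLOWED_IDS = [10000002, 10000043]           # stand-in for config.CREST_REGION_PRICE

with Session(engine) as session:
    session.add_all([Region(id=10000002),
                     Region(id=10000043),
                     Region(id=10000044, wh=True)])
    session.commit()
    # filter() and filter_by() chain freely; IN-lists use Column.in_().
    regions = (session.query(Region)
               .filter(Region.id.in_(ALLOWED_IDS))
               .filter_by(wh=False)
               .all())
    print(sorted(r.id for r in regions))     # [10000002, 10000043]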
563220ef19395201aed7f6392519f84db4ec7a77
tests/test_midas.py
tests/test_midas.py
import datetime from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert False
import datetime import numpy as np from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
Add assertion for forecast test
Add assertion for forecast test
Python
mit
mikemull/midaspy
import datetime from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert False Add assertion for forecast test
import datetime import numpy as np from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
<commit_before>import datetime from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert False <commit_msg>Add assertion for forecast test<commit_after>
import datetime import numpy as np from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
import datetime from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert False Add assertion for forecast testimport datetime import numpy as np from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
<commit_before>import datetime from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert False <commit_msg>Add assertion for forecast test<commit_after>import datetime import numpy as np from midas import mix from midas.midas import estimate, forecast def test_estimate(gdp_data, farmpay_data): y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1, start_date=datetime.datetime(1985, 1, 1), end_date=datetime.datetime(2009, 1, 1)) res = estimate(y, yl, x) fc = forecast(xf, ylf, res) print(fc) assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
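The assertion added in the record above relies on numpy.isclose with a relative tolerance rather than exact float equality, which is the usual way to pin down a forecast value. A tiny illustration of why that matters (the forecast value here is a hypothetical model output, not data from the record):

import numpy as np

forecast_value = 1.3368441234   # hypothetical model output with float noise
expected = 1.336844

# Exact equality fails on floating-point noise...
print(forecast_value == expected)                       # False
# ...while isclose accepts |a - b| <= atol + rtol * |b|
# (atol defaults to 1e-8), so a relative tolerance of 1e-6 passes.
print(np.isclose(forecast_value, expected, rtol=1e-6))  # True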
1f9a11640463df94166be8dffa824e57485154f8
tests/vaspy_test.py
tests/vaspy_test.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.")
#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest from ani_test import AniFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), unittest.TestLoader().loadTestsFromTestCase(AniFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.")
Add test for animation file.
Add test for animation file.
Python
mit
PytLab/VASPy,PytLab/VASPy
#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.") Add test for animation file.
#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest from ani_test import AniFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), unittest.TestLoader().loadTestsFromTestCase(AniFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.")
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.") <commit_msg>Add test for animation file.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest from ani_test import AniFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), unittest.TestLoader().loadTestsFromTestCase(AniFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.")
#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.") Add test for animation file.#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest from ani_test import AniFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), unittest.TestLoader().loadTestsFromTestCase(AniFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.")
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.") <commit_msg>Add test for animation file.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import unittest from arc_test import ArcTest from incar_test import InCarTest from oszicar_test import OsziCarTest from outcar_test import OutCarTest from xsd_test import XsdTest from xtd_test import XtdTest from poscar_test import PosCarTest from xyzfile_test import XyzFileTest from cif_test import CifFileTest from ani_test import AniFileTest def suite(): suite = unittest.TestSuite([ unittest.TestLoader().loadTestsFromTestCase(ArcTest), unittest.TestLoader().loadTestsFromTestCase(InCarTest), unittest.TestLoader().loadTestsFromTestCase(OsziCarTest), unittest.TestLoader().loadTestsFromTestCase(OutCarTest), unittest.TestLoader().loadTestsFromTestCase(XsdTest), unittest.TestLoader().loadTestsFromTestCase(XtdTest), unittest.TestLoader().loadTestsFromTestCase(PosCarTest), unittest.TestLoader().loadTestsFromTestCase(XyzFileTest), unittest.TestLoader().loadTestsFromTestCase(CifFileTest), unittest.TestLoader().loadTestsFromTestCase(AniFileTest), ]) return suite if "__main__" == __name__: result = unittest.TextTestRunner(verbosity=2).run(suite()) if result.errors or result.failures: raise ValueError("Get errors and failures.")
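The suite in the record above registers every TestCase by hand via loadTestsFromTestCase, so each new test file means another import and another list entry. Where test modules follow a naming convention, unittest's discovery can build the equivalent suite without the manual list -- a sketch under that assumption (the *_test.py pattern mirrors the file names in the record; this is an alternative, not the project's own approach):

import sys
import unittest

def suite():
    # Collect every TestCase from files matching *_test.py under the
    # current directory, instead of listing classes by hand.
    return unittest.TestLoader().discover('.', pattern='*_test.py')

if __name__ == '__main__':
    result = unittest.TextTestRunner(verbosity=2).run(suite())
    sys.exit(not result.wasSuccessful())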
b8a22c1dfe58802665231e8a82bb546bfd1dbbc8
pybossa/sentinel/__init__.py
pybossa/sentinel/__init__.py
from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) self.master = self.connection.master_for('mymaster') self.slave = self.connection.slave_for('mymaster')
from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) redis_db = app.config['REDIS_DB'] or 0 print "Redis db is ", redis_db self.master = self.connection.master_for('mymaster', db=redis_db) self.slave = self.connection.slave_for('mymaster', db=redis_db)
Use config redis database in sentinel connections
Use config redis database in sentinel connections
Python
agpl-3.0
inteligencia-coletiva-lsd/pybossa,geotagx/pybossa,jean/pybossa,PyBossa/pybossa,PyBossa/pybossa,stefanhahmann/pybossa,OpenNewsLabs/pybossa,OpenNewsLabs/pybossa,stefanhahmann/pybossa,Scifabric/pybossa,geotagx/pybossa,jean/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa
from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) self.master = self.connection.master_for('mymaster') self.slave = self.connection.slave_for('mymaster') Use config redis database in sentinel connections
from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) redis_db = app.config['REDIS_DB'] or 0 print "Redis db is ", redis_db self.master = self.connection.master_for('mymaster', db=redis_db) self.slave = self.connection.slave_for('mymaster', db=redis_db)
<commit_before>from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) self.master = self.connection.master_for('mymaster') self.slave = self.connection.slave_for('mymaster') <commit_msg>Use config redis database in sentinel connections<commit_after>
from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) redis_db = app.config['REDIS_DB'] or 0 print "Redis db is ", redis_db self.master = self.connection.master_for('mymaster', db=redis_db) self.slave = self.connection.slave_for('mymaster', db=redis_db)
from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) self.master = self.connection.master_for('mymaster') self.slave = self.connection.slave_for('mymaster') Use config redis database in sentinel connectionsfrom redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) redis_db = app.config['REDIS_DB'] or 0 print "Redis db is ", redis_db self.master = self.connection.master_for('mymaster', db=redis_db) self.slave = self.connection.slave_for('mymaster', db=redis_db)
<commit_before>from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) self.master = self.connection.master_for('mymaster') self.slave = self.connection.slave_for('mymaster') <commit_msg>Use config redis database in sentinel connections<commit_after>from redis import sentinel class Sentinel(object): def __init__(self, app=None): self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'], socket_timeout=0.1) redis_db = app.config['REDIS_DB'] or 0 print "Redis db is ", redis_db self.master = self.connection.master_for('mymaster', db=redis_db) self.slave = self.connection.slave_for('mymaster', db=redis_db)
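A minimal sketch of the redis-py Sentinel API the record above uses, including the db selection the commit introduces. The sentinel endpoints, service name, and db number are placeholders, and a reachable Sentinel is required to actually run it; note the record's own code is Python 2 (bare print statement), so this sketch is written for Python 3:

from redis.sentinel import Sentinel

SENTINEL_HOSTS = [('localhost', 26379)]  # hypothetical sentinel endpoints
REDIS_DB = 1                             # stand-in for app.config['REDIS_DB']

sentinel = Sentinel(SENTINEL_HOSTS, socket_timeout=0.1)
# master_for()/slave_for() return ordinary Redis clients bound to the
# current master/replica of the named service; extra kwargs such as db
# are forwarded to the underlying connection pool.
master = sentinel.master_for('mymaster', db=REDIS_DB)
slave = sentinel.slave_for('mymaster', db=REDIS_DB)

master.set('greeting', 'hello')
print(slave.get('greeting'))             # b'hello' once replication catches up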
695dad10b6d27e2b45a7b98abad29b9d922b976f
pylisp/packet/ip/protocol.py
pylisp/packet/ip/protocol.py
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes())
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
Split Protocol class in Protocol and ProtocolElement
Split Protocol class in Protocol and ProtocolElement
Python
bsd-3-clause
steffann/pylisp
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes()) Split Protocol class in Protocol and ProtocolElement
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
<commit_before>''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes()) <commit_msg>Split Protocol class in Protocol and ProtocolElement<commit_after>
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes()) Split Protocol class in Protocol and ProtocolElement''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
<commit_before>''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class Protocol(object): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' self.next_header = next_header self.payload = payload def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' def __str__(self): return str(self.to_bytes()) def __bytes__(self): return bytes(self.to_bytes()) <commit_msg>Split Protocol class in Protocol and ProtocolElement<commit_after>''' Created on 11 jan. 2013 @author: sander ''' from abc import abstractmethod, ABCMeta class ProtocolElement(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self): ''' Constructor ''' def __repr__(self): # This works as long as we accept all properties as paramters in the # constructor params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(params)) def __str__(self): return str(self.to_bytes()) def __bytes__(self): return self.to_bytes() @abstractmethod def sanitize(self): ''' Check and optionally fix properties ''' @classmethod @abstractmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' @abstractmethod def to_bytes(self): ''' Create bytes from properties ''' class Protocol(ProtocolElement): __metaclass__ = ABCMeta header_type = None @abstractmethod def __init__(self, next_header=None, payload=''): ''' Constructor ''' super(Protocol, self).__init__() self.next_header = next_header self.payload = payload
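The refactor in the record above hoists the shared dunder/serialization machinery into an abstract base that concrete protocols inherit. A compact Python 3 equivalent of that shape -- the record's code is Python 2, hence __metaclass__ and iteritems -- showing how abstractmethod blocks direct instantiation of the base; the class bodies here are illustrative, not the project's real logic:

from abc import ABC, abstractmethod

class ProtocolElement(ABC):
    @abstractmethod
    def to_bytes(self):
        """Serialize this element to bytes."""

    def __bytes__(self):
        # Shared behaviour lives on the base and delegates to the
        # subclass-provided to_bytes().
        return self.to_bytes()

class Protocol(ProtocolElement):
    def __init__(self, next_header=None, payload=b''):
        self.next_header = next_header
        self.payload = payload

    def to_bytes(self):
        return self.payload

# ProtocolElement() would raise TypeError (abstract method to_bytes),
# while the concrete subclass works:
print(bytes(Protocol(payload=b'\x01\x02')))  # b'\x01\x02'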
90d1a40175675b2950cb41b85434b522d6e21c4d
mass/cli.py
mass/cli.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() def init(): utils.register_domain() utils.register_workflow_type() utils.register_activity_type() @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() @click.option('-d', '--domain', help='Amazon SWF Domain.') @click.option('-r', '--region', help='Amazon Region.') def init(domain, region): utils.register_domain(domain, region) utils.register_workflow_type(domain, region) utils.register_activity_type(domain, region) @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
Add arguments --domain and --region to mass init.
Add arguments --domain and --region to mass init.
Python
apache-2.0
badboy99tw/mass,KKBOX/mass,badboy99tw/mass,badboy99tw/mass,KKBOX/mass,KKBOX/mass
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() def init(): utils.register_domain() utils.register_workflow_type() utils.register_activity_type() @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor) Add arguments --domain and --region to mass init.
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() @click.option('-d', '--domain', help='Amazon SWF Domain.') @click.option('-r', '--region', help='Amazon Region.') def init(domain, region): utils.register_domain(domain, region) utils.register_workflow_type(domain, region) utils.register_activity_type(domain, region) @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
<commit_before>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() def init(): utils.register_domain() utils.register_workflow_type() utils.register_activity_type() @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor) <commit_msg>Add arguments --domain and --region to mass init.<commit_after>
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() @click.option('-d', '--domain', help='Amazon SWF Domain.') @click.option('-r', '--region', help='Amazon Region.') def init(domain, region): utils.register_domain(domain, region) utils.register_workflow_type(domain, region) utils.register_activity_type(domain, region) @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() def init(): utils.register_domain() utils.register_workflow_type() utils.register_activity_type() @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor) Add arguments --domain and --region to mass init.#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() @click.option('-d', '--domain', help='Amazon SWF Domain.') @click.option('-r', '--region', help='Amazon Region.') def init(domain, region): utils.register_domain(domain, region) utils.register_workflow_type(domain, region) utils.register_activity_type(domain, region) @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
<commit_before>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() def init(): utils.register_domain() utils.register_workflow_type() utils.register_activity_type() @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor) <commit_msg>Add arguments --domain and --region to mass init.<commit_after>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() @click.option('-d', '--domain', help='Amazon SWF Domain.') @click.option('-r', '--region', help='Amazon Region.') def init(domain, region): utils.register_domain(domain, region) utils.register_workflow_type(domain, region) utils.register_activity_type(domain, region) @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
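A self-contained sketch of the click group/subcommand pattern the CLI in the record above is built on. One detail worth noting: click derives the callback's argument name from the long option (--domain becomes a parameter named domain), which is why the init command's new signature matches its options. The command and defaults below are illustrative:

import click

@click.group()
def cli():
    """Toy task runner."""

@cli.command()
@click.option('-d', '--domain', default='dev', help='Service domain.')
@click.option('-r', '--region', default='us-east-1', help='Service region.')
def init(domain, region):
    # click passes --domain/--region as keyword arguments named after
    # the long option, so the signature must use those exact names.
    click.echo(f'init {domain} in {region}')

if __name__ == '__main__':
    cli()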
75457e90381b6ff38becfed0befe214c9d6c0fc1
skyscanner/__init__.py
skyscanner/__init__.py
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.3' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.4' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
Include skyscanner version to be used by doc.
Include skyscanner version to be used by doc.
Python
apache-2.0
Skyscanner/skyscanner-python-sdk
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.3' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ Include skyscanner version to be used by doc.
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.4' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
<commit_before># -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.3' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ <commit_msg>Include skyscanner version to be used by doc.<commit_after>
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.4' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.3' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ Include skyscanner version to be used by doc.# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.4' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
<commit_before># -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.3' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ <commit_msg>Include skyscanner version to be used by doc.<commit_after># -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '1.1.4' __copyright__ = "Copyright (C) 2016 Skyscanner Ltd" __license__ = """ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
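The commit above bumps __version__ so the documentation can read it from the package. A plausible sketch of that pattern in a Sphinx conf.py (an assumption; the record does not show the docs configuration):

import skyscanner

version = skyscanner.__version__  # e.g. '1.1.4', used as the short X.Y version
release = skyscanner.__version__  # full release string shown in the built docs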
a1c7773eb889ece3233b910c559b4e22ade3bb32
timpani/settings.py
timpani/settings.py
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() def validateSetting(name, value): if name == "title": return len(value) > 0
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): valid = validateSetting(name, value) if valid: databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() return True return False def validateSetting(name, value): if name == "title": return len(value) > 0
Use setting validation function in setSettingValue
Use setting validation function in setSettingValue
Python
mit
ollien/Timpani,ollien/Timpani,ollien/Timpani
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() def validateSetting(name, value): if name == "title": return len(value) > 0 Use setting validation function in setSettingValue
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): valid = validateSetting(name, value) if valid: databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() return True return False def validateSetting(name, value): if name == "title": return len(value) > 0
<commit_before>from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() def validateSetting(name, value): if name == "title": return len(value) > 0 <commit_msg>Use setting validation function in setSettingValue<commit_after>
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): valid = validateSetting(name, value) if valid: databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() return True return False def validateSetting(name, value): if name == "title": return len(value) > 0
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() def validateSetting(name, value): if name == "title": return len(value) > 0 Use setting validation function in setSettingValuefrom . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): valid = validateSetting(name, value) if valid: databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() return True return False def validateSetting(name, value): if name == "title": return len(value) > 0
<commit_before>from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() def validateSetting(name, value): if name == "title": return len(value) > 0 <commit_msg>Use setting validation function in setSettingValue<commit_after>from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = (databaseConnection.session .query(database.tables.Setting) .filter(database.tables.Setting.name == name)) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): valid = validateSetting(name, value) if valid: databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit() return True return False def validateSetting(name, value): if name == "title": return len(value) > 0
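With the change above, setSettingValue reports whether the write happened instead of always committing. Note that validateSetting only handles the "title" name and implicitly returns None (falsy) for anything else, so unrecognized settings are now rejected as well. A hedged usage sketch, assuming a configured database connection:

from timpani import settings

if settings.setSettingValue("title", "My Blog"):
    print("setting saved")
else:
    print("validation failed; nothing was written")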
d6224333a8c815086cf8f10b0bb9ee23f7aec3ef
numpy/fft/info.py
numpy/fft/info.py
"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs refft irefft refft2 irefft2 refftn irefftn Hermite FFTs hfft ihfft """ depends = ['core']
"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs rfft irfft rfft2 irfft2 rfftn irfftn Hermite FFTs hfft ihfft """ depends = ['core']
Fix documentation of fft sub-package to eliminate references to refft.
Fix documentation of fft sub-package to eliminate references to refft. git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3226 94b884b6-d6fd-0310-90d3-974f1d3f35e1
Python
bsd-3-clause
illume/numpy3k,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,illume/numpy3k,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,efiring/numpy-work,efiring/numpy-work,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,efiring/numpy-work,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,illume/numpy3k,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,efiring/numpy-work,chadnetzer/numpy-gaurdro,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint
"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs refft irefft refft2 irefft2 refftn irefftn Hermite FFTs hfft ihfft """ depends = ['core'] Fix documentation of fft sub-package to eliminate references to refft. git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3226 94b884b6-d6fd-0310-90d3-974f1d3f35e1
"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs rfft irfft rfft2 irfft2 rfftn irfftn Hermite FFTs hfft ihfft """ depends = ['core']
<commit_before>"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs refft irefft refft2 irefft2 refftn irefftn Hermite FFTs hfft ihfft """ depends = ['core'] <commit_msg>Fix documentation of fft sub-package to eliminate references to refft. git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3226 94b884b6-d6fd-0310-90d3-974f1d3f35e1<commit_after>
"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs rfft irfft rfft2 irfft2 rfftn irfftn Hermite FFTs hfft ihfft """ depends = ['core']
"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs refft irefft refft2 irefft2 refftn irefftn Hermite FFTs hfft ihfft """ depends = ['core'] Fix documentation of fft sub-package to eliminate references to refft. git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3226 94b884b6-d6fd-0310-90d3-974f1d3f35e1"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs rfft irfft rfft2 irfft2 rfftn irfftn Hermite FFTs hfft ihfft """ depends = ['core']
<commit_before>"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs refft irefft refft2 irefft2 refftn irefftn Hermite FFTs hfft ihfft """ depends = ['core'] <commit_msg>Fix documentation of fft sub-package to eliminate references to refft. git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3226 94b884b6-d6fd-0310-90d3-974f1d3f35e1<commit_after>"""\ Core FFT routines ================== Standard FFTs fft ifft fft2 ifft2 fftn ifftn Real FFTs rfft irfft rfft2 irfft2 rfftn irfftn Hermite FFTs hfft ihfft """ depends = ['core']
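The docstring fix above tracks NumPy's rename of the real-input FFT routines from refft* to rfft*. A quick round-trip sketch with the current names:

import numpy as np

x = np.array([0.0, 1.0, 0.0, -1.0])
spectrum = np.fft.rfft(x)                     # real-input FFT (the routine once listed as "refft")
recovered = np.fft.irfft(spectrum, n=len(x))  # inverse; n restores the original length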
ee99527185268ac386aad0c54056ac640c197e42
dbmigrator/commands/init.py
dbmigrator/commands/init.py
# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ CREATE TABLE IF NOT EXISTS schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") cursor.execute("""\ DELETE FROM schema_migrations""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command
# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ SELECT 1 FROM information_schema.tables WHERE table_name = 'schema_migrations'""") table_exists = cursor.fetchone() if table_exists: print('Schema migrations already initialized.') return cursor.execute("""\ CREATE TABLE schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command
Stop changing schema_migrations data if the table already exists
Stop changing schema_migrations data if the table already exists
Python
agpl-3.0
karenc/db-migrator
# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ CREATE TABLE IF NOT EXISTS schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") cursor.execute("""\ DELETE FROM schema_migrations""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command Stop changing schema_migrations data if the table already exists
# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ SELECT 1 FROM information_schema.tables WHERE table_name = 'schema_migrations'""") table_exists = cursor.fetchone() if table_exists: print('Schema migrations already initialized.') return cursor.execute("""\ CREATE TABLE schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command
<commit_before># -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ CREATE TABLE IF NOT EXISTS schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") cursor.execute("""\ DELETE FROM schema_migrations""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command <commit_msg>Stop changing schema_migrations data if the table already exists<commit_after>
# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ SELECT 1 FROM information_schema.tables WHERE table_name = 'schema_migrations'""") table_exists = cursor.fetchone() if table_exists: print('Schema migrations already initialized.') return cursor.execute("""\ CREATE TABLE schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command
# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ CREATE TABLE IF NOT EXISTS schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") cursor.execute("""\ DELETE FROM schema_migrations""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command Stop changing schema_migrations data if the table already exists# -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ SELECT 1 FROM information_schema.tables WHERE table_name = 'schema_migrations'""") table_exists = cursor.fetchone() if table_exists: print('Schema migrations already initialized.') return cursor.execute("""\ CREATE TABLE schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command
<commit_before># -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ CREATE TABLE IF NOT EXISTS schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") cursor.execute("""\ DELETE FROM schema_migrations""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command <commit_msg>Stop changing schema_migrations data if the table already exists<commit_after># -*- coding: utf-8 -*- # ### # Copyright (c) 2015, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### from .. import utils __all__ = ('cli_loader',) @utils.with_cursor def cli_command(cursor, migrations_directory='', version=None, **kwargs): cursor.execute("""\ SELECT 1 FROM information_schema.tables WHERE table_name = 'schema_migrations'""") table_exists = cursor.fetchone() if table_exists: print('Schema migrations already initialized.') return cursor.execute("""\ CREATE TABLE schema_migrations ( version TEXT NOT NULL, applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP )""") versions = [] if version is None: timestamp = utils.timestamp() else: timestamp = str(version) for version, name in utils.get_migrations(migrations_directory): if version <= timestamp: versions.append((version,)) cursor.executemany("""\ INSERT INTO schema_migrations VALUES (%s) """, versions) print('Schema migrations initialized.') def cli_loader(parser): parser.add_argument('--version', type=int, help='Set the schema version to VERSION, ' 'default current timestamp') return cli_command
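The new guard above probes information_schema before creating the table, which makes init safe to re-run. The same idempotency pattern in plain psycopg2, as a sketch with a hypothetical connection string:

import psycopg2

conn = psycopg2.connect("dbname=example")  # hypothetical DSN
with conn.cursor() as cursor:
    cursor.execute(
        "SELECT 1 FROM information_schema.tables "
        "WHERE table_name = 'schema_migrations'")
    if cursor.fetchone():
        print('Schema migrations already initialized.')
    else:
        cursor.execute(
            "CREATE TABLE schema_migrations ("
            "version TEXT NOT NULL, "
            "applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP)")
        conn.commit()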
30f03692eff862f1456b9c376c21fe8e57de7eaa
dbt/clients/agate_helper.py
dbt/clients/agate_helper.py
import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Number(), agate.data_types.Date(), agate.data_types.DateTime(), agate.data_types.Boolean(), agate.data_types.Text() ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Boolean(true_values=('true',), false_values=('false',), null_values=('null',)), agate.data_types.Number(null_values=('null',)), agate.data_types.TimeDelta(null_values=('null',)), agate.data_types.Date(null_values=('null',)), agate.data_types.DateTime(null_values=('null',)), agate.data_types.Text(null_values=('null',)) ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
Make the agate table type tester more restrictive on what counts as null/true/false
Make the agate table type tester more restrictive on what counts as null/true/false
Python
apache-2.0
analyst-collective/dbt,nave91/dbt,nave91/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,analyst-collective/dbt
import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Number(), agate.data_types.Date(), agate.data_types.DateTime(), agate.data_types.Boolean(), agate.data_types.Text() ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER) Make the agate table type tester more restrictive on what counts as null/true/false
import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Boolean(true_values=('true',), false_values=('false',), null_values=('null',)), agate.data_types.Number(null_values=('null',)), agate.data_types.TimeDelta(null_values=('null',)), agate.data_types.Date(null_values=('null',)), agate.data_types.DateTime(null_values=('null',)), agate.data_types.Text(null_values=('null',)) ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
<commit_before> import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Number(), agate.data_types.Date(), agate.data_types.DateTime(), agate.data_types.Boolean(), agate.data_types.Text() ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER) <commit_msg>Make the agate table type tester more restrictive on what counts as null/true/false<commit_after>
import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Boolean(true_values=('true',), false_values=('false',), null_values=('null',)), agate.data_types.Number(null_values=('null',)), agate.data_types.TimeDelta(null_values=('null',)), agate.data_types.Date(null_values=('null',)), agate.data_types.DateTime(null_values=('null',)), agate.data_types.Text(null_values=('null',)) ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Number(), agate.data_types.Date(), agate.data_types.DateTime(), agate.data_types.Boolean(), agate.data_types.Text() ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER) Make the agate table type tester more restrictive on what counts as null/true/false import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Boolean(true_values=('true',), false_values=('false',), null_values=('null',)), agate.data_types.Number(null_values=('null',)), agate.data_types.TimeDelta(null_values=('null',)), agate.data_types.Date(null_values=('null',)), agate.data_types.DateTime(null_values=('null',)), agate.data_types.Text(null_values=('null',)) ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
<commit_before> import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Number(), agate.data_types.Date(), agate.data_types.DateTime(), agate.data_types.Boolean(), agate.data_types.Text() ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER) <commit_msg>Make the agate table type tester more restrictive on what counts as null/true/false<commit_after> import agate DEFAULT_TYPE_TESTER = agate.TypeTester(types=[ agate.data_types.Boolean(true_values=('true',), false_values=('false',), null_values=('null',)), agate.data_types.Number(null_values=('null',)), agate.data_types.TimeDelta(null_values=('null',)), agate.data_types.Date(null_values=('null',)), agate.data_types.DateTime(null_values=('null',)), agate.data_types.Text(null_values=('null',)) ]) def table_from_data(data, column_names): "Convert list of dictionaries into an Agate table" # The agate table is generated from a list of dicts, so the column order # from `data` is not preserved. We can use `select` to reorder the columns # # If there is no data, create an empty table with the specified columns if len(data) == 0: return agate.Table([], column_names=column_names) else: table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) return table.select(column_names) def empty_table(): "Returns an empty Agate table. To be used in place of None" return agate.Table(rows=[]) def as_matrix(table): "Return an agate table as a matrix of data sans columns" return [r.values() for r in table.rows.values()] def from_csv(abspath): return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
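The stricter tester above accepts only the literal strings 'true'/'false' as booleans and only 'null' as a null marker, so values that agate's defaults would otherwise swallow as null (for example 'n/a' or the empty string) now survive. A small sketch of the difference, assuming agate's DataType.cast semantics:

import agate
from agate.exceptions import CastError

strict_bool = agate.data_types.Boolean(
    true_values=('true',), false_values=('false',), null_values=('null',))

print(strict_bool.cast('true'))   # True
print(strict_bool.cast('null'))   # None
try:
    strict_bool.cast('n/a')       # no longer treated as a null marker
except CastError:
    print('rejected as a boolean')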
52fa6cff088e2032fc8a3a9d732bf8affb9bccae
config/template.py
config/template.py
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = ''
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
Allow for representative view display with sample configuration
Allow for representative view display with sample configuration
Python
mit
AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' Allow for representative view display with sample configuration
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
<commit_before>DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' <commit_msg>Allow for representative view display with sample configuration<commit_after>
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' Allow for representative view display with sample configurationDB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
<commit_before>DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' <commit_msg>Allow for representative view display with sample configuration<commit_after>DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
068862dc72fa82ec35e7fabc6a0a99dc10f7f034
octavia/common/service.py
octavia/common/service.py
# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config from octavia.i18n import _LI LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) LOG.info(_LI('Starting Octavia API server')) log.set_defaults() config.setup_logging(cfg.CONF)
# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) log.set_defaults() config.setup_logging(cfg.CONF)
Remove bad INFO log "Starting Octavia API server"
Remove bad INFO log "Starting Octavia API server" This log is also displayed for the health_manager and house_keeping services. The API service already displays "Starting API server on..." at INFO level. Change-Id: I0a3ff91b556accdfadbad797488d17ae7a95d85b
Python
apache-2.0
openstack/octavia,openstack/octavia,openstack/octavia
# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config from octavia.i18n import _LI LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) LOG.info(_LI('Starting Octavia API server')) log.set_defaults() config.setup_logging(cfg.CONF) Remove bad INFO log "Starting Octavia API server" This log is also displayed for the health_manager and house_keeping services. The API service already displays "Starting API server on..." at INFO level. Change-Id: I0a3ff91b556accdfadbad797488d17ae7a95d85b
# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) log.set_defaults() config.setup_logging(cfg.CONF)
<commit_before># Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config from octavia.i18n import _LI LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) LOG.info(_LI('Starting Octavia API server')) log.set_defaults() config.setup_logging(cfg.CONF) <commit_msg>Remove bad INFO log "Starting Octavia API server" This log is also display for health_manager and house_keeping service. Api service already display "Starting API server on..." in INFO level. Change-Id: I0a3ff91b556accdfadbad797488d17ae7a95d85b<commit_after>
# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) log.set_defaults() config.setup_logging(cfg.CONF)
# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config from octavia.i18n import _LI LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) LOG.info(_LI('Starting Octavia API server')) log.set_defaults() config.setup_logging(cfg.CONF) Remove bad INFO log "Starting Octavia API server" This log is also displayed for the health_manager and house_keeping services. The API service already displays "Starting API server on..." at INFO level. Change-Id: I0a3ff91b556accdfadbad797488d17ae7a95d85b# Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) log.set_defaults() config.setup_logging(cfg.CONF)
<commit_before># Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config from octavia.i18n import _LI LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) LOG.info(_LI('Starting Octavia API server')) log.set_defaults() config.setup_logging(cfg.CONF) <commit_msg>Remove bad INFO log "Starting Octavia API server" This log is also displayed for the health_manager and house_keeping services. The API service already displays "Starting API server on..." at INFO level. Change-Id: I0a3ff91b556accdfadbad797488d17ae7a95d85b<commit_after># Copyright 2014 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log from octavia.common import config LOG = log.getLogger(__name__) def prepare_service(argv=None): """Sets global config from config file and sets up logging.""" argv = argv or [] config.init(argv[1:]) log.set_defaults() config.setup_logging(cfg.CONF)
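The removed line above was misleading because prepare_service is shared: the api, health_manager, and house_keeping binaries all call it, and the API service logs its own startup line separately. A sketch of a shared entry point after the change (the main wrapper is illustrative, not from the record):

import sys

from octavia.common import service

def main():
    service.prepare_service(sys.argv)
    # each binary now logs its own identity instead of an API-specific message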
d568e040293da3438293fb007a762fd35b8c7483
extras/client.py
extras/client.py
import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='hmac-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content)
import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('extras/example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='rsa-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content)
Fix path, use rsa-sha256 algorithm to actually use the rsa-based verification
Fix path, use rsa-sha256 algorithm to actually use the rsa-based verification
Python
bsd-3-clause
keybar/keybar
import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='hmac-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content) Fix path, use rsa-sha256 algorithm to actually use the rsa-based verification
import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('extras/example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='rsa-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content)
<commit_before>import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='hmac-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content) <commit_msg>Fix path, use rsa-sha256 algorithm to actually use the rsa-based verification<commit_after>
import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('extras/example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='rsa-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content)
import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='hmac-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content) Fix path, use rsa-sha256 algorithm to actually use the rsa-based verificationimport os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('extras/example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='rsa-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content)
<commit_before>import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='hmac-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content) <commit_msg>Fix path, use rsa-sha256 algorithm to actually use the rsa-based verification<commit_after>import os from email.utils import formatdate from datetime import datetime from time import mktime os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'keybar.settings') import django django.setup() from django.conf import settings from httpsig.requests_auth import HTTPSignatureAuth import requests from keybar.models.user import User # TODO: Use a secret RSA key as secret. secret = open('extras/example_keys/private_key.pem', 'rb').read() user = User.objects.get(username='admin') signature_headers = ['(request-target)', 'accept', 'date', 'host'] now = datetime.now() stamp = mktime(now.timetuple()) headers = { 'Host': 'keybar.local:8443', 'Method': 'GET', 'Path': '/api/v1/users/', 'Accept': 'application/json', 'X-Api-Key': user.api_key.hex, 'Date': formatdate(timeval=stamp, localtime=False, usegmt=True) } auth = HTTPSignatureAuth( key_id=user.api_key.hex, secret=secret, headers=signature_headers, algorithm='rsa-sha256') response = requests.get( 'https://keybar.local:8443/api/v1/users/', auth=auth, headers=headers, verify=settings.KEYBAR_CA_BUNDLE) print(response.content)
41c6b1820e8b23079d9098526854c9a60859d128
gcloud_expenses/test_views.py
gcloud_expenses/test_views.py
import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_my_view(self): from pyramid import testing from .views import my_view request = testing.DummyRequest() info = my_view(request) self.assertEqual(info['project'], 'foo')
import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_home_page(self): from pyramid import testing from .views import home_page request = testing.DummyRequest() info = home_page(request) self.assertEqual(info, {})
Fix test broken in rename.
Fix test broken in rename.
Python
apache-2.0
GoogleCloudPlatform/google-cloud-python-expenses-demo,GoogleCloudPlatform/google-cloud-python-expenses-demo
import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_my_view(self): from pyramid import testing from .views import my_view request = testing.DummyRequest() info = my_view(request) self.assertEqual(info['project'], 'foo') Fix test broken in rename.
import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_home_page(self): from pyramid import testing from .views import home_page request = testing.DummyRequest() info = home_page(request) self.assertEqual(info, {})
<commit_before>import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_my_view(self): from pyramid import testing from .views import my_view request = testing.DummyRequest() info = my_view(request) self.assertEqual(info['project'], 'foo') <commit_msg>Fix test broken in rename.<commit_after>
import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_home_page(self): from pyramid import testing from .views import home_page request = testing.DummyRequest() info = home_page(request) self.assertEqual(info, {})
import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_my_view(self): from pyramid import testing from .views import my_view request = testing.DummyRequest() info = my_view(request) self.assertEqual(info['project'], 'foo') Fix test broken in rename.import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_home_page(self): from pyramid import testing from .views import home_page request = testing.DummyRequest() info = home_page(request) self.assertEqual(info, {})
<commit_before>import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_my_view(self): from pyramid import testing from .views import my_view request = testing.DummyRequest() info = my_view(request) self.assertEqual(info['project'], 'foo') <commit_msg>Fix test broken in rename.<commit_after>import unittest class ViewTests(unittest.TestCase): def setUp(self): from pyramid import testing self.config = testing.setUp() def tearDown(self): from pyramid import testing testing.tearDown() def test_home_page(self): from pyramid import testing from .views import home_page request = testing.DummyRequest() info = home_page(request) self.assertEqual(info, {})
4691e024986a24930c88646cf3a3ae95683dc880
main.py
main.py
"""pluss, a feed proxy for G+""" import logging from pluss.app import app @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et:
"""pluss, a feed proxy for G+""" import logging from pluss.app import app from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et:
Add proxyfix for X-Forwarded-For header
Add proxyfix for X-Forwarded-For header
Python
mit
ayust/pluss,ayust/pluss
"""pluss, a feed proxy for G+""" import logging from pluss.app import app @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et: Add proxyfix for X-Forwarded-For header
"""pluss, a feed proxy for G+""" import logging from pluss.app import app from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et:
<commit_before>"""pluss, a feed proxy for G+""" import logging from pluss.app import app @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et: <commit_msg>Add proxyfix for X-Forwarded-For header<commit_after>
"""pluss, a feed proxy for G+""" import logging from pluss.app import app from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et:
"""pluss, a feed proxy for G+""" import logging from pluss.app import app @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et: Add proxyfix for X-Forwarded-For header"""pluss, a feed proxy for G+""" import logging from pluss.app import app from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et:
<commit_before>"""pluss, a feed proxy for G+""" import logging from pluss.app import app @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et: <commit_msg>Add proxyfix for X-Forwarded-For header<commit_after>"""pluss, a feed proxy for G+""" import logging from pluss.app import app from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) @app.before_first_request def setup_logging(): if not app.debug: # In production mode, add log handler to sys.stderr. handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( "%(asctime)s [%(process)d] [%(levelname)s] %(pathname)s:%(lineno)d %(message)s", "%Y-%m-%d %H:%M:%S", )) app.logger.addHandler(handler) app.logger.setLevel(logging.WARNING) if __name__ == '__main__': app.run(host='pluss.aiiane.com', port=54321, debug=True) # vim: set ts=4 sts=4 sw=4 et:
769c83564d5f2272837c2fbea6d781110b71b8ca
main.py
main.py
from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: if len(vectors[0]) == 2: display_source(vectors) clusters = kmeans(vectors, clusters_count=clusters_count) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main()
from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: clusters = kmeans(vectors, clusters_count=clusters_count) if len(vectors[0]) == 2: display_source(vectors) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main()
Fix trying to display result in case of not 2D vectors
Fix trying to display result in case of not 2D vectors
Python
mit
vanashimko/k-means
from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: if len(vectors[0]) == 2: display_source(vectors) clusters = kmeans(vectors, clusters_count=clusters_count) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main() Fix trying to display result in case of not 2D vectors
from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: clusters = kmeans(vectors, clusters_count=clusters_count) if len(vectors[0]) == 2: display_source(vectors) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main()
<commit_before>from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: if len(vectors[0]) == 2: display_source(vectors) clusters = kmeans(vectors, clusters_count=clusters_count) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main() <commit_msg>Fix trying to display result in case of not 2D vectors<commit_after>
from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: clusters = kmeans(vectors, clusters_count=clusters_count) if len(vectors[0]) == 2: display_source(vectors) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main()
from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: if len(vectors[0]) == 2: display_source(vectors) clusters = kmeans(vectors, clusters_count=clusters_count) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main() Fix trying to display result in case of not 2D vectorsfrom sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: clusters = kmeans(vectors, clusters_count=clusters_count) if len(vectors[0]) == 2: display_source(vectors) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main()
<commit_before>from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: if len(vectors[0]) == 2: display_source(vectors) clusters = kmeans(vectors, clusters_count=clusters_count) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main() <commit_msg>Fix trying to display result in case of not 2D vectors<commit_after>from sys import argv, stderr from drawer import * from kmeans import kmeans def read_vectors(file_name): result = None with open(file_name, 'r') as f: vector_length = int(f.readline()) vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines())) if all((len(x) == vector_length for x in vectors)): result = vectors return result def main(): vectors = read_vectors(argv[1]) clusters_count = int(argv[2]) if vectors: clusters = kmeans(vectors, clusters_count=clusters_count) if len(vectors[0]) == 2: display_source(vectors) display_result(vectors, clusters) else: print('Invalid input', file=stderr) if __name__ == '__main__': main()
03430a5b0abbd051e878274a669edf5afaa656b3
sc2/helpers/control_group.py
sc2/helpers/control_group.py
class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0
class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 def add_unit(self, unit): self.add(unit.tag) def add_units(self, units): for unit in units: self.add_unit(unit) def remove_unit(self, unit): self.remove(unit.tag) def remove_units(self, units): for unit in units: self.remove(unit.tag)
Add modification operations to control groups
Add modification operations to control groups
Python
mit
Dentosal/python-sc2
class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 Add modification operations to control groups
class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 def add_unit(self, unit): self.add(unit.tag) def add_units(self, units): for unit in units: self.add_unit(unit) def remove_unit(self, unit): self.remove(unit.tag) def remove_units(self, units): for unit in units: self.remove(unit.tag)
<commit_before>class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 <commit_msg>Add modification operations to control groups<commit_after>
class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 def add_unit(self, unit): self.add(unit.tag) def add_units(self, units): for unit in units: self.add_unit(unit) def remove_unit(self, unit): self.remove(unit.tag) def remove_units(self, units): for unit in units: self.remove(unit.tag)
class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 Add modification operations to control groupsclass ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 def add_unit(self, unit): self.add(unit.tag) def add_units(self, units): for unit in units: self.add_unit(unit) def remove_unit(self, unit): self.remove(unit.tag) def remove_units(self, units): for unit in units: self.remove(unit.tag)
<commit_before>class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 <commit_msg>Add modification operations to control groups<commit_after>class ControlGroup(set): def __init__(self, units): super().__init__({unit.tag for unit in units}) def __hash__(self): return hash(tuple(sorted(list(self)))) def select_units(self, units): return units.filter(lambda unit: unit.tag in self) def missing_unit_tags(self, units): return {t for t in self if units.find_by_tag(t) is None} @property def empty(self): return self.amount == 0 def add_unit(self, unit): self.add(unit.tag) def add_units(self, units): for unit in units: self.add_unit(unit) def remove_unit(self, unit): self.remove(unit.tag) def remove_units(self, units): for unit in units: self.remove(unit.tag)
6820de9ccdb7cc7263142108881cf98aab85adb1
space-age/space_age.py
space-age/space_age.py
# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): return round((self._seconds / 31557600) * 0.240846, 2) obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury())
# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): planet = self.on_earth() * 0.2408467 return planet def on_venus(self): planet = self.on_earth() * 0.61519726 return planet def on_mars(self): planet = self.on_earth() * 1.8808158 return planet def on_jupiter(self): planet = self.on_earth() * 11.862615 return planet def on_saturn(self): planet = self.on_earth() * 29.447498 return planet def on_uranus(self): planet = self.on_earth() * 84.016846 return planet def on_neptune(self): planet = self.on_earth() * 164.79132 return planet obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury())
Add other planets age function
Add other planets age function
Python
mit
amalshehu/exercism-python
# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): return round((self._seconds / 31557600) * 0.240846, 2) obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury()) Add other planets age function
# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): planet = self.on_earth() * 0.2408467 return planet def on_venus(self): planet = self.on_earth() * 0.61519726 return planet def on_mars(self): planet = self.on_earth() * 1.8808158 return planet def on_jupiter(self): planet = self.on_earth() * 11.862615 return planet def on_saturn(self): planet = self.on_earth() * 29.447498 return planet def on_uranus(self): planet = self.on_earth() * 84.016846 return planet def on_neptune(self): planet = self.on_earth() * 164.79132 return planet obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury())
<commit_before># File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): return round((self._seconds / 31557600) * 0.240846, 2) obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury()) <commit_msg>Add other planets age function<commit_after>
# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): planet = self.on_earth() * 0.2408467 return planet def on_venus(self): planet = self.on_earth() * 0.61519726 return planet def on_mars(self): planet = self.on_earth() * 1.8808158 return planet def on_jupiter(self): planet = self.on_earth() * 11.862615 return planet def on_saturn(self): planet = self.on_earth() * 29.447498 return planet def on_uranus(self): planet = self.on_earth() * 84.016846 return planet def on_neptune(self): planet = self.on_earth() * 164.79132 return planet obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury())
# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): return round((self._seconds / 31557600) * 0.240846, 2) obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury()) Add other planets age function# File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): planet = self.on_earth() * 0.2408467 return planet def on_venus(self): planet = self.on_earth() * 0.61519726 return planet def on_mars(self): planet = self.on_earth() * 1.8808158 return planet def on_jupiter(self): planet = self.on_earth() * 11.862615 return planet def on_saturn(self): planet = self.on_earth() * 29.447498 return planet def on_uranus(self): planet = self.on_earth() * 84.016846 return planet def on_neptune(self): planet = self.on_earth() * 164.79132 return planet obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury())
<commit_before># File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): return round((self._seconds / 31557600) * 0.240846, 2) obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury()) <commit_msg>Add other planets age function<commit_after># File: space_age.py # Purpose: Write a program that, given an age in seconds, calculates # how old someone is in terms of a given planet's solar years. # Programmer: Amal Shehu # Course: Exercism # Date: Saturday 17 September 2016, 06:09 PM class SpaceAge(object): """docstring for SpaceAge.""" def __init__(self, _seconds): self._seconds = _seconds def on_earth(self): return round((self._seconds / 31557600), 2) def on_mercury(self): planet = self.on_earth() * 0.2408467 return planet def on_venus(self): planet = self.on_earth() * 0.61519726 return planet def on_mars(self): planet = self.on_earth() * 1.8808158 return planet def on_jupiter(self): planet = self.on_earth() * 11.862615 return planet def on_saturn(self): planet = self.on_earth() * 29.447498 return planet def on_uranus(self): planet = self.on_earth() * 84.016846 return planet def on_neptune(self): planet = self.on_earth() * 164.79132 return planet obj = SpaceAge(1e6) print (obj.on_earth()) print (obj.on_mercury())
eb2b91d30244fd44b45ffc21b963256150b59152
frappe/patches/v11_0/reload_and_rename_view_log.py
frappe/patches/v11_0/reload_and_rename_view_log.py
import frappe def execute(): if frappe.db.exists('DocType', 'View log'): frappe.reload_doc('core', 'doctype', 'view_log', force=True) frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`") frappe.delete_doc('DocType', 'View log') frappe.reload_doc('core', 'doctype', 'view_log', force=True) else: frappe.reload_doc('core', 'doctype', 'view_log')
import frappe def execute(): if frappe.db.exists('DocType', 'View log'): # for mac users direct renaming would not work since mysql for mac saves table name in lower case # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 , # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names # here we are creating a temp table to store view log data frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`") # deleting old View log table frappe.db.sql("DROP table `tabView log`") frappe.delete_doc('DocType', 'View log') # reloading view log doctype to create `tabView Log` table frappe.reload_doc('core', 'doctype', 'view_log') frappe.db.commit() # Move the data to newly created `tabView Log` table frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`") # Delete temporary table frappe.db.sql("DROP table `ViewLogTemp`") else: frappe.reload_doc('core', 'doctype', 'view_log')
Fix rename view log patch for mac users
Fix rename view log patch for mac users for mac users direct renaming would not work since mysql for mac saves table name in lower case, so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
Python
mit
mhbu50/frappe,yashodhank/frappe,vjFaLk/frappe,adityahase/frappe,mhbu50/frappe,almeidapaulopt/frappe,saurabh6790/frappe,adityahase/frappe,vjFaLk/frappe,yashodhank/frappe,saurabh6790/frappe,frappe/frappe,frappe/frappe,vjFaLk/frappe,yashodhank/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,StrellaGroup/frappe,yashodhank/frappe,vjFaLk/frappe,mhbu50/frappe,adityahase/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,saurabh6790/frappe,frappe/frappe,saurabh6790/frappe,adityahase/frappe
import frappe def execute(): if frappe.db.exists('DocType', 'View log'): frappe.reload_doc('core', 'doctype', 'view_log', force=True) frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`") frappe.delete_doc('DocType', 'View log') frappe.reload_doc('core', 'doctype', 'view_log', force=True) else: frappe.reload_doc('core', 'doctype', 'view_log') Fix rename view log patch for mac users for mac users direct renaming would not work since mysql for mac saves table name in lower case, so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
import frappe def execute(): if frappe.db.exists('DocType', 'View log'): # for mac users direct renaming would not work since mysql for mac saves table name in lower case # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 , # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names # here we are creating a temp table to store view log data frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`") # deleting old View log table frappe.db.sql("DROP table `tabView log`") frappe.delete_doc('DocType', 'View log') # reloading view log doctype to create `tabView Log` table frappe.reload_doc('core', 'doctype', 'view_log') frappe.db.commit() # Move the data to newly created `tabView Log` table frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`") # Delete temporary table frappe.db.sql("DROP table `ViewLogTemp`") else: frappe.reload_doc('core', 'doctype', 'view_log')
<commit_before>import frappe def execute(): if frappe.db.exists('DocType', 'View log'): frappe.reload_doc('core', 'doctype', 'view_log', force=True) frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`") frappe.delete_doc('DocType', 'View log') frappe.reload_doc('core', 'doctype', 'view_log', force=True) else: frappe.reload_doc('core', 'doctype', 'view_log') <commit_msg>Fix rename view log patch for mac users for mac users direct renaming would not work since mysql for mac saves table name in lower case, so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names<commit_after>
import frappe def execute(): if frappe.db.exists('DocType', 'View log'): # for mac users direct renaming would not work since mysql for mac saves table name in lower case # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 , # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names # here we are creating a temp table to store view log data frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`") # deleting old View log table frappe.db.sql("DROP table `tabView log`") frappe.delete_doc('DocType', 'View log') # reloading view log doctype to create `tabView Log` table frappe.reload_doc('core', 'doctype', 'view_log') frappe.db.commit() # Move the data to newly created `tabView Log` table frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`") # Delete temporary table frappe.db.sql("DROP table `ViewLogTemp`") else: frappe.reload_doc('core', 'doctype', 'view_log')
import frappe def execute(): if frappe.db.exists('DocType', 'View log'): frappe.reload_doc('core', 'doctype', 'view_log', force=True) frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`") frappe.delete_doc('DocType', 'View log') frappe.reload_doc('core', 'doctype', 'view_log', force=True) else: frappe.reload_doc('core', 'doctype', 'view_log') Fix rename view log patch for mac users for mac users direct renaming would not work since mysql for mac saves table name in lower case, so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_namesimport frappe def execute(): if frappe.db.exists('DocType', 'View log'): # for mac users direct renaming would not work since mysql for mac saves table name in lower case # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 , # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names # here we are creating a temp table to store view log data frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`") # deleting old View log table frappe.db.sql("DROP table `tabView log`") frappe.delete_doc('DocType', 'View log') # reloading view log doctype to create `tabView Log` table frappe.reload_doc('core', 'doctype', 'view_log') frappe.db.commit() # Move the data to newly created `tabView Log` table frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`") # Delete temporary table frappe.db.sql("DROP table `ViewLogTemp`") else: frappe.reload_doc('core', 'doctype', 'view_log')
<commit_before>import frappe def execute(): if frappe.db.exists('DocType', 'View log'): frappe.reload_doc('core', 'doctype', 'view_log', force=True) frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`") frappe.delete_doc('DocType', 'View log') frappe.reload_doc('core', 'doctype', 'view_log', force=True) else: frappe.reload_doc('core', 'doctype', 'view_log') <commit_msg>Fix rename view log patch for mac users for mac users direct renaming would not work since mysql for mac saves table name in lower case, so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names<commit_after>import frappe def execute(): if frappe.db.exists('DocType', 'View log'): # for mac users direct renaming would not work since mysql for mac saves table name in lower case # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 , # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names # here we are creating a temp table to store view log data frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`") # deleting old View log table frappe.db.sql("DROP table `tabView log`") frappe.delete_doc('DocType', 'View log') # reloading view log doctype to create `tabView Log` table frappe.reload_doc('core', 'doctype', 'view_log') frappe.db.commit() # Move the data to newly created `tabView Log` table frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`") # Delete temporary table frappe.db.sql("DROP table `ViewLogTemp`") else: frappe.reload_doc('core', 'doctype', 'view_log')
53c39934e19fdad7926a8ad7833cd1737b47cf58
utilities/errors.py
utilities/errors.py
import os import simulators import numpy as np import json """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) raise e def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
import os import simulators import numpy as np import json import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: warnings.warn("No snr data present for {0}-{1}_{2}. " "Setting error to None instead".format(star, obs_num, chip)) return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if snr is None: errors = None elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
Handle no snr information in snr file. (for fake simulations mainly)
Handle no snr information in snr file. (for fake simulations mainly)
Python
mit
jason-neal/companion_simulations,jason-neal/companion_simulations
import os import simulators import numpy as np import json """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) raise e def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors Handle no snr information in snr file. (for fake simulations mainly)
import os import simulators import numpy as np import json import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: warnings.warn("No snr data present for {0}-{1}_{2}. " "Setting error to None instead".format(star, obs_num, chip)) return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if snr is None: errors = None elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
<commit_before>import os import simulators import numpy as np import json """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) raise e def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors <commit_msg>Handle no snr information in snr file. (for fake simulations mainly)<commit_after>
import os import simulators import numpy as np import json import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: warnings.warn("No snr data present for {0}-{1}_{2}. " "Setting error to None instead".format(star, obs_num, chip)) return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if snr is None: errors = None elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
import os import simulators import numpy as np import json """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) raise e def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors Handle no snr information in snr file. (for fake simulations mainly)import os import simulators import numpy as np import json import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: warnings.warn("No snr data present for {0}-{1}_{2}. " "Setting error to None instead".format(star, obs_num, chip)) return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if snr is None: errors = None elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
<commit_before>import os import simulators import numpy as np import json """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip)) raise e def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors <commit_msg>Handle no snr information in snr file. (for fake simualtions mainly)<commit_after>import os import simulators import numpy as np import json import warnings """Calculate Errors on the Spectrum. For a first go using an fixed SNR of 200 for all observations. """ def get_snrinfo(star, obs_num, chip): """Load SNR info from json file.""" snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json") with open(snr_file, "r") as f: snr_data = json.load(f) try: return snr_data[str(star)][str(obs_num)][str(chip)] except KeyError as e: warnings.warn("No snr data present for {0}-{1}_{2}. " "Setting error to None instead".format(star, obs_num, chip)) return None def spectrum_error(star, obs_num, chip, error_off=False): """Return the spectrum error. errors = None will perform a normal chi**2 statistic. """ if error_off: errors = None else: snr = get_snrinfo(star, obs_num, chip) if snr is None: errors = None elif len(snr) == 1: errors = 1 / np.float(snr[0]) else: raise NotImplementedError("Haven't checked if an error array can be handled yet.") return errors
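Context for the record above: the fix replaces a raised KeyError with warnings.warn plus a None return, and the caller then maps None onto "no error array". A minimal, runnable sketch of that warn-and-return-None pattern; the in-memory dict and star names are invented stand-ins for detector_snrs.json:

import warnings

_SNRS = {"HD30501": {"1": {"1": [200]}}}  # invented stand-in for the JSON file

def get_snr(star, obs_num, chip):
    try:
        return _SNRS[str(star)][str(obs_num)][str(chip)]
    except KeyError:
        # Warn instead of raising, so fake/simulated observations still run.
        warnings.warn("No snr data present for {0}-{1}_{2}. "
                      "Setting error to None instead".format(star, obs_num, chip))
        return None

print(get_snr("HD30501", 1, 1))  # [200]
print(get_snr("HD99999", 1, 1))  # None, plus a UserWarning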
f1cabc889dd93e26295501097ac9cbf90890a1cd
solvent/config.py
solvent/config.py
import yaml import os LOCAL_OSMOSIS = 'localhost:1010' OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS
import yaml import os LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010' LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010' LOCAL_OSMOSIS = None OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") global LOCAL_OSMOSIS if LOCAL_OSMOSIS is None: if os.getuid() == 0: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_ROOT else: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_NOT_ROOT def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS
Select local osmosis depending on whether the user is root, to avoid permission denied on /var/lib/osmosis
Bugfix: Select local osmosis depending on whether the user is root, to avoid permission denied on /var/lib/osmosis
Python
apache-2.0
Stratoscale/solvent,Stratoscale/solvent
import yaml import os LOCAL_OSMOSIS = 'localhost:1010' OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS Bugfix: Select local osmosis depends if user is root, to avoid permission denied on /var/lib/osmosis
import yaml import os LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010' LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010' LOCAL_OSMOSIS = None OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") global LOCAL_OSMOSIS if LOCAL_OSMOSIS is None: if os.getuid() == 0: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_ROOT else: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_NOT_ROOT def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS
<commit_before>import yaml import os LOCAL_OSMOSIS = 'localhost:1010' OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS <commit_msg>Bugfix: Select local osmosis depends if user is root, to avoid permission denied on /var/lib/osmosis<commit_after>
import yaml import os LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010' LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010' LOCAL_OSMOSIS = None OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") global LOCAL_OSMOSIS if LOCAL_OSMOSIS is None: if os.getuid() == 0: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_ROOT else: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_NOT_ROOT def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS
import yaml import os LOCAL_OSMOSIS = 'localhost:1010' OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS Bugfix: Select local osmosis depends if user is root, to avoid permission denied on /var/lib/osmosisimport yaml import os LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010' LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010' LOCAL_OSMOSIS = None OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") global LOCAL_OSMOSIS if LOCAL_OSMOSIS is None: if os.getuid() == 0: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_ROOT else: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_NOT_ROOT def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS
<commit_before>import yaml import os LOCAL_OSMOSIS = 'localhost:1010' OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS <commit_msg>Bugfix: Select local osmosis depends if user is root, to avoid permission denied on /var/lib/osmosis<commit_after>import yaml import os LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010' LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010' LOCAL_OSMOSIS = None OFFICIAL_OSMOSIS = None OFFICIAL_BUILD = False WITH_OFFICIAL_OBJECT_STORE = True CLEAN = False def load(filename): with open(filename) as f: data = yaml.load(f.read()) if data is None: raise Exception("Configuration file must not be empty") globals().update(data) if 'SOLVENT_CONFIG' in os.environ: data = yaml.load(os.environ['SOLVENT_CONFIG']) globals().update(data) if 'SOLVENT_CLEAN' in os.environ: global CLEAN CLEAN = True if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None: raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field") global LOCAL_OSMOSIS if LOCAL_OSMOSIS is None: if os.getuid() == 0: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_ROOT else: LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_NOT_ROOT def objectStoresOsmosisParameter(): if WITH_OFFICIAL_OBJECT_STORE: return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS else: return LOCAL_OSMOSIS
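The heart of this record's bugfix is choosing a default endpoint by effective privilege: os.getuid() returns 0 only for root, and is POSIX-only. A minimal sketch of just that selection logic, with the endpoint strings copied from the record and the configured argument invented for illustration:

import os

LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010'
LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010'

def pick_local_osmosis(configured=None):
    # An explicit value from a config file wins; otherwise choose by uid,
    # so non-root runs can avoid permission errors on root-owned state dirs.
    if configured is not None:
        return configured
    if os.getuid() == 0:
        return LOCAL_OSMOSIS_IF_ROOT
    return LOCAL_OSMOSIS_IF_NOT_ROOT

print(pick_local_osmosis())  # result depends on the invoking user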
536801f970702792b0b23e4795929746ae2f92f8
src/ggrc/settings/default.py
src/ggrc/settings/default.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: DEBUG = False TESTING = False AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret')
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com DEBUG = False TESTING = False # Flask-SQLAlchemy fix to be less than `wait_time` in /etc/mysql/my.cnf SQLALCHEMY_POOL_RECYCLE = 120 # Settings in app.py AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret')
Set SQLALCHEMY_POOL_RECYCLE to be less than `wait_time`
Set SQLALCHEMY_POOL_RECYCLE to be less than `wait_time` * should fix "MySQL server has gone away" errors in development mode
Python
apache-2.0
j0gurt/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,2947721120/sagacious-capsicum,prasannav7/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,2947721120/sagacious-capsicum,VinnieJohns/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,hamyuan/ggrc-self-test,hyperNURb/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,ankit-collective/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,ankit-collective/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,2947721120/sagacious-capsicum,hamyuan/ggrc-self-test,edofic/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,2947721120/sagacious-capsicum,prasannav7/ggrc-core,ankit-collective/ggrc-core,hyperNURb/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,hamyuan/ggrc-self-test,hasanalom/ggrc-core,2947721120/sagacious-capsicum,plamut/ggrc-core,VinnieJohns/ggrc-core,ankit-collective/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,hamyuan/ggrc-self-test,ankit-collective/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,hamyuan/ggrc-self-test,plamut/ggrc-core,AleksNeStu/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: DEBUG = False TESTING = False AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret') Set SQLALCHEMY_POOL_RECYCLE to be less than `wait_time` * should fix "MySQL server has gone away" errors in development mode
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com DEBUG = False TESTING = False # Flask-SQLAlchemy fix to be less than `wait_time` in /etc/mysql/my.cnf SQLALCHEMY_POOL_RECYCLE = 120 # Settings in app.py AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret')
<commit_before> # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: DEBUG = False TESTING = False AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret') <commit_msg>Set SQLALCHEMY_POOL_RECYCLE to be less than `wait_time` * should fix "MySQL server has gone away" errors in development mode<commit_after>
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com DEBUG = False TESTING = False # Flask-SQLAlchemy fix to be less than `wait_time` in /etc/mysql/my.cnf SQLALCHEMY_POOL_RECYCLE = 120 # Settings in app.py AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret')
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: DEBUG = False TESTING = False AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret') Set SQLALCHEMY_POOL_RECYCLE to be less than `wait_time` * should fix "MySQL server has gone away" errors in development mode# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com DEBUG = False TESTING = False # Flask-SQLAlchemy fix to be less than `wait_time` in /etc/mysql/my.cnf SQLALCHEMY_POOL_RECYCLE = 120 # Settings in app.py AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret')
<commit_before> # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: DEBUG = False TESTING = False AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret') <commit_msg>Set SQLALCHEMY_POOL_RECYCLE to be less than `wait_time` * should fix "MySQL server has gone away" errors in development mode<commit_after># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com DEBUG = False TESTING = False # Flask-SQLAlchemy fix to be less than `wait_time` in /etc/mysql/my.cnf SQLALCHEMY_POOL_RECYCLE = 120 # Settings in app.py AUTOBUILD_ASSETS = False ENABLE_JASMINE = False FULLTEXT_INDEXER = None # Deployment-specific variables COMPANY = "Company, Inc." COMPANY_LOGO_TEXT = "Company GRC" VERSION = "s4" # Initialize from environment if present import os SQLALCHEMY_DATABASE_URI = os.environ.get('GGRC_DATABASE_URI', '') SECRET_KEY = os.environ.get('GGRC_SECRET_KEY', 'Replace-with-something-secret')
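Background on this record's one-line fix: MySQL silently closes connections idle longer than its wait_timeout server variable (the in-code comment calls it wait_time), after which SQLAlchemy's pool can hand out dead handles and requests fail with "MySQL server has gone away". SQLALCHEMY_POOL_RECYCLE tells Flask-SQLAlchemy to discard pooled connections older than the given number of seconds, so a value below wait_timeout sidesteps the stale handles. A hypothetical minimal app showing where the value lands, assuming Flask and Flask-SQLAlchemy are installed; the database URI is invented:

from flask import Flask

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://user:secret@localhost/grc'
# Recycle pooled connections after 120 seconds, i.e. below MySQL's wait_timeout.
app.config['SQLALCHEMY_POOL_RECYCLE'] = 120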
967240f95edb300d731f24cfb259a1fe4f3bdae5
webapp_health_monitor/management/commands/verify.py
webapp_health_monitor/management/commands/verify.py
import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if str(e) != "No module named '{}'".format(submodule): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME])
import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if not self._import_error_concerns_verificator(submodule, e): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) def _import_error_concerns_verificator(self, submodule, error): if sys.version_info >= (3, 0): return str(error) == "No module named '{}'".format(submodule) else: return error.message == "No module named {}".format( self.SUBMODULE_NAME)
Fix Python 2 Django module ImportError.
Fix Python 2 Django module ImportError.
Python
mit
pozytywnie/webapp-health-monitor
import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if str(e) != "No module named '{}'".format(submodule): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) Fix python2 django module importerror.
import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if not self._import_error_concerns_verificator(submodule, e): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) def _import_error_concerns_verificator(self, submodule, error): if sys.version_info >= (3, 0): return str(error) == "No module named '{}'".format(submodule) else: return error.message == "No module named {}".format( self.SUBMODULE_NAME)
<commit_before>import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if str(e) != "No module named '{}'".format(submodule): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) <commit_msg>Fix python2 django module importerror.<commit_after>
import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if not self._import_error_concerns_verificator(submodule, e): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) def _import_error_concerns_verificator(self, submodule, error): if sys.version_info >= (3, 0): return str(error) == "No module named '{}'".format(submodule) else: return error.message == "No module named {}".format( self.SUBMODULE_NAME)
import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if str(e) != "No module named '{}'".format(submodule): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) Fix python2 django module importerror.import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if not self._import_error_concerns_verificator(submodule, e): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) def _import_error_concerns_verificator(self, submodule, error): if sys.version_info >= (3, 0): return str(error) == "No module named '{}'".format(submodule) else: return error.message == "No module named {}".format( self.SUBMODULE_NAME)
<commit_before>import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if str(e) != "No module named '{}'".format(submodule): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) <commit_msg>Fix python2 django module importerror.<commit_after>import importlib import sys from django.apps import apps from django.core.management.base import BaseCommand from webapp_health_monitor.verification_suit import VerificationSuit class Command(BaseCommand): SUBMODULE_NAME = 'verificators' def handle(self, *args, **options): submodules = self._get_verificator_modules() for submodule in submodules: try: importlib.import_module(submodule) except ImportError as e: if not self._import_error_concerns_verificator(submodule, e): raise e result = VerificationSuit().run() self.stdout.write('{}\n'.format(result.report())) sys.exit(result.has_failed()) def _get_verificator_modules(self): for app in apps.get_app_configs(): yield '.'.join([app.module.__name__, self.SUBMODULE_NAME]) def _import_error_concerns_verificator(self, submodule, error): if sys.version_info >= (3, 0): return str(error) == "No module named '{}'".format(submodule) else: return error.message == "No module named {}".format( self.SUBMODULE_NAME)
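The version split in this record exists because the interpreters word the error differently: Python 3 reports "No module named 'pkg.sub'" with quotes and the full dotted path, while Python 2 reports a bare, unquoted name. On Python 3.6+ the same "optional submodule" intent can be written without string matching, via ModuleNotFoundError and its .name attribute; the sketch below is a hedged alternative, not what this package itself does:

import importlib

def import_optional(dotted_name):
    # Return the module, or None when it (or a parent package) is simply
    # absent; re-raise when it exists but fails while importing.
    try:
        return importlib.import_module(dotted_name)
    except ModuleNotFoundError as exc:  # Python 3.6+
        missing = exc.name or ''
        if dotted_name == missing or dotted_name.startswith(missing + '.'):
            return None
        raise

print(import_optional('no_such_module_xyz'))  # None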
80529d5032b6728adcaad426310c30b5e6366ad4
solution.py
solution.py
class Kiosk(): def __init__(self, visit_cost, location): self.visit_cost = visit_cost self.location = location print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' #patient should be Person def visit(self, patient): patient.money -= visit_cost #improve patient.diabetes #improve patient.cardio return True #Patient should be from class Person def filter(self, patient): if not patient.location == self.location: print "patient not at proper location" return False if not patient.money>self.visit_cost: print "patient cannot afford treatment" return False visit(self,patient)
class Kiosk(): def __init__(self, location, visit_cost, diabetes_threshold, cardio_threshold): self.location = location self.visit_cost = visit_cost self.diabetes_threshold = diabetes_threshold self.cardio_threshold = cardio_threshold #Initial cost to create kiosk: $5000. We are measuring in rupees self.money = -309900 print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' patient.money -= visit_cost kiosk.money += visit_cost #If we diagnose diabetes if patient.diabetes<diabetes_threshold: #For now, we ignore the details and just improve the patient's #health. patient.diabetes = diabetes_threshold #If we diagnose cardiovascular problems if patient.cardio<cardio_threshold: #For now, we ignore the details and just improve the patient's #health. patient.cardio = cardio_threshold #The visit was successful return True
Clean up and finish Kiosk class
Clean up and finish Kiosk class There was some redundancy because I merged it poorly
Python
bsd-3-clause
rkawauchi/IHK,rkawauchi/IHK
class Kiosk(): def __init__(self, visit_cost, location): self.visit_cost = visit_cost self.location = location print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' #patient should be Person def visit(self, patient): patient.money -= visit_cost #improve patient.diabetes #improve patient.cardio return True #Patient should be from class Person def filter(self, patient): if not patient.location == self.location: print "patient not at proper location" return False if not patient.money>self.visit_cost: print "patient cannot afford treatment" return False visit(self,patient) Clean up and finish Kiosk class There was some redundancy because I merged it poorly
class Kiosk(): def __init__(self, location, visit_cost, diabetes_threshold, cardio_threshold): self.location = location self.visit_cost = visit_cost self.diabetes_threshold = diabetes_threshold self.cardio_threshold = cardio_threshold #Initial cost to create kiosk: $5000. We are measuring in rupees self.money = -309900 print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' patient.money -= visit_cost kiosk.money += visit_cost #If we diagnose diabetes if patient.diabetes<diabetes_threshold: #For now, we ignore the details and just improve the patient's #health. patient.diabetes = diabetes_threshold #If we diagnose cardiovascular problems if patient.cardio<cardio_threshold: #For now, we ignore the details and just improve the patient's #health. patient.cardio = cardio_threshold #The visit was successful return True
<commit_before>class Kiosk(): def __init__(self, visit_cost, location): self.visit_cost = visit_cost self.location = location print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' #patient should be Person def visit(self, patient): patient.money -= visit_cost #improve patient.diabetes #improve patient.cardio return True #Patient should be from class Person def filter(self, patient): if not patient.location == self.location: print "patient not at proper location" return False if not patient.money>self.visit_cost: print "patient cannot afford treatment" return False visit(self,patient) <commit_msg>Clean up and finish Kiosk class There was some redundancy because I merged it poorly<commit_after>
class Kiosk(): def __init__(self, location, visit_cost, diabetes_threshold, cardio_threshold): self.location = location self.visit_cost = visit_cost self.diabetes_threshold = diabetes_threshold self.cardio_threshold = cardio_threshold #Initial cost to create kiosk: $5000. We are measuring in rupees self.money = -309900 print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' patient.money -= visit_cost kiosk.money += visit_cost #If we diagnose diabetes if patient.diabetes<diabetes_threshold: #For now, we ignore the details and just improve the patient's #health. patient.diabetes = diabetes_threshold #If we diagnose cardiovascular problems if patient.cardio<cardio_threshold: #For now, we ignore the details and just improve the patient's #health. patient.cardio = cardio_threshold #The visit was successful return True
class Kiosk(): def __init__(self, visit_cost, location): self.visit_cost = visit_cost self.location = location print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' #patient should be Person def visit(self, patient): patient.money -= visit_cost #improve patient.diabetes #improve patient.cardio return True #Patient should be from class Person def filter(self, patient): if not patient.location == self.location: print "patient not at proper location" return False if not patient.money>self.visit_cost: print "patient cannot afford treatment" return False visit(self,patient) Clean up and finish Kiosk class There was some redundancy because I merged it poorlyclass Kiosk(): def __init__(self, location, visit_cost, diabetes_threshold, cardio_threshold): self.location = location self.visit_cost = visit_cost self.diabetes_threshold = diabetes_threshold self.cardio_threshold = cardio_threshold #Initial cost to create kiosk: $5000. We are measuring in rupees self.money = -309900 print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' patient.money -= visit_cost kiosk.money += visit_cost #If we diagnose diabetes if patient.diabetes<diabetes_threshold: #For now, we ignore the details and just improve the patient's #health. patient.diabetes = diabetes_threshold #If we diagnose cardiovascular problems if patient.cardio<cardio_threshold: #For now, we ignore the details and just improve the patient's #health. patient.cardio = cardio_threshold #The visit was successful return True
<commit_before>class Kiosk(): def __init__(self, visit_cost, location): self.visit_cost = visit_cost self.location = location print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' #patient should be Person def visit(self, patient): patient.money -= visit_cost #improve patient.diabetes #improve patient.cardio return True #Patient should be from class Person def filter(self, patient): if not patient.location == self.location: print "patient not at proper location" return False if not patient.money>self.visit_cost: print "patient cannot afford treatment" return False visit(self,patient) <commit_msg>Clean up and finish Kiosk class There was some redundancy because I merged it poorly<commit_after>class Kiosk(): def __init__(self, location, visit_cost, diabetes_threshold, cardio_threshold): self.location = location self.visit_cost = visit_cost self.diabetes_threshold = diabetes_threshold self.cardio_threshold = cardio_threshold #Initial cost to create kiosk: $5000. We are measuring in rupees self.money = -309900 print 'initializing Kiosk' #patient shold be Person def visit(self, patient): if not patient.location == self.location: print 'patient not in correct location' return False if not patient.money>self.visit_cost: print 'patient cannot afford treatment' patient.money -= visit_cost kiosk.money += visit_cost #If we diagnose diabetes if patient.diabetes<diabetes_threshold: #For now, we ignore the details and just improve the patient's #health. patient.diabetes = diabetes_threshold #If we diagnose cardiovascular problems if patient.cardio<cardio_threshold: #For now, we ignore the details and just improve the patient's #health. patient.cardio = cardio_threshold #The visit was successful return True
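Even after this record's cleanup, the new_contents is still not runnable: visit() reads visit_cost, diabetes_threshold, cardio_threshold and kiosk.money as bare names instead of through self, and the cannot-afford branch falls through into the payment code. A corrected sketch of the same logic, assuming patient is any object with location, money, diabetes and cardio attributes; constants and messages are taken from the record, the rest is a guess at the intended behaviour:

class Kiosk(object):
    def __init__(self, location, visit_cost, diabetes_threshold, cardio_threshold):
        self.location = location
        self.visit_cost = visit_cost
        self.diabetes_threshold = diabetes_threshold
        self.cardio_threshold = cardio_threshold
        self.money = -309900  # initial build cost in rupees, as in the record

    def visit(self, patient):
        if patient.location != self.location:
            print('patient not in correct location')
            return False
        if patient.money <= self.visit_cost:
            print('patient cannot afford treatment')
            return False  # the record's version falls through here
        patient.money -= self.visit_cost
        self.money += self.visit_cost
        # Raise any sub-threshold health scores up to the treatment threshold.
        if patient.diabetes < self.diabetes_threshold:
            patient.diabetes = self.diabetes_threshold
        if patient.cardio < self.cardio_threshold:
            patient.cardio = self.cardio_threshold
        return True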
4dbd402879a626d7fb1d6fe5108695d1eeb32c42
hiispider/servers/mixins/amqp.py
hiispider/servers/mixins/amqp.py
class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 100 self.amqp_setup = True
class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 1000 self.amqp_setup = True
Set prefetch count to 1000.
Set prefetch count to 1000.
Python
mit
hiidef/hiispider,hiidef/hiispider
class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 100 self.amqp_setup = True Set prefetch count to 1000.
class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 1000 self.amqp_setup = True
<commit_before>class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 100 self.amqp_setup = True <commit_msg>Set prefetch count to 1000.<commit_after>
class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 1000 self.amqp_setup = True
class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 100 self.amqp_setup = True Set prefetch count to 1000.class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 1000 self.amqp_setup = True
<commit_before>class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 100 self.amqp_setup = True <commit_msg>Set prefetch count to 1000.<commit_after>class AMQPMixin(object): amqp_setup = False def setupAMQP(self, config): if not self.amqp_setup: # Create AMQP Connection # AMQP connection parameters self.amqp_host = config["amqp_host"] self.amqp_port = config.get("amqp_port", 5672) self.amqp_username = config["amqp_username"] self.amqp_password = config["amqp_password"] self.amqp_queue = config["amqp_queue"] self.amqp_exchange = config["amqp_exchange"] self.amqp_prefetch_count = 1000 self.amqp_setup = True
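For context on the record above: a prefetch count is enforced on the broker through AMQP's basic.qos method, which caps how many unacknowledged deliveries may be in flight to one consumer, so raising it from 100 to 1000 trades consumer memory for throughput. The record never shows the call site, so this illustration uses the pika client purely as an assumption; it is not necessarily what this spider uses:

import pika  # assumed installed, illustrative client choice only

connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
# Allow up to 1000 unacked deliveries in flight to this consumer.
channel.basic_qos(prefetch_count=1000)
connection.close()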
be8344c2f796ecab60669630f4729c4ffa41c83b
web/impact/impact/v1/views/utils.py
web/impact/impact/v1/views/utils.py
def merge_data_by_id(data): result = {} for datum in data: id = datum["id"] item = result.get(id, {}) item.update(datum) result[id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data]
def coalesce_dictionaries(data, merge_field="id"): "Takes a sequence of dictionaries, merges those that share the same merge_field, and returns a list of resulting dictionaries" result = {} for datum in data: merge_id = datum[merge_field] item = result.get(merge_id, {}) item.update(datum) result[merge_id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data]
Rename merge_data_by_id, add doc-string, get rid of id as a local
[AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local
Python
mit
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
def merge_data_by_id(data): result = {} for datum in data: id = datum["id"] item = result.get(id, {}) item.update(datum) result[id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data] [AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local
def coalesce_dictionaries(data, merge_field="id"): "Takes a sequence of dictionaries, merges those that share the same merge_field, and returns a list of resulting dictionaries" result = {} for datum in data: merge_id = datum[merge_field] item = result.get(merge_id, {}) item.update(datum) result[merge_id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data]
<commit_before>def merge_data_by_id(data): result = {} for datum in data: id = datum["id"] item = result.get(id, {}) item.update(datum) result[id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data] <commit_msg>[AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local<commit_after>
def coalesce_dictionaries(data, merge_field="id"): "Takes a sequence of dictionaries, merges those that share the same merge_field, and returns a list of resulting dictionaries" result = {} for datum in data: merge_id = datum[merge_field] item = result.get(merge_id, {}) item.update(datum) result[merge_id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data]
def merge_data_by_id(data): result = {} for datum in data: id = datum["id"] item = result.get(id, {}) item.update(datum) result[id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data] [AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a localdef coalesce_dictionaries(data, merge_field="id"): "Takes a sequence of dictionaries, merges those that share the same merge_field, and returns a list of resulting dictionaries" result = {} for datum in data: merge_id = datum[merge_field] item = result.get(merge_id, {}) item.update(datum) result[merge_id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data]
<commit_before>def merge_data_by_id(data): result = {} for datum in data: id = datum["id"] item = result.get(id, {}) item.update(datum) result[id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data] <commit_msg>[AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local<commit_after>def coalesce_dictionaries(data, merge_field="id"): "Takes a sequence of dictionaries, merges those that share the same merge_field, and returns a list of resulting dictionaries" result = {} for datum in data: merge_id = datum[merge_field] item = result.get(merge_id, {}) item.update(datum) result[merge_id] = item return result.values() def map_data(klass, query, order, data_keys, output_keys): result = klass.objects.filter(query).order_by(order) data = result.values_list(*data_keys) return [dict(zip(output_keys, values)) for values in data]
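A small worked example of the renamed coalesce_dictionaries: rows sharing the same merge_field are folded into one dict, later values winning on key collisions because dict.update runs in input order. The function body is copied from the record; the rows are invented:

def coalesce_dictionaries(data, merge_field="id"):
    result = {}
    for datum in data:
        merge_id = datum[merge_field]
        item = result.get(merge_id, {})
        item.update(datum)
        result[merge_id] = item
    return result.values()

rows = [
    {"id": 1, "name": "Acme"},
    {"id": 1, "industry": "Robotics"},
    {"id": 2, "name": "Globex"},
]
print(list(coalesce_dictionaries(rows)))
# [{'id': 1, 'name': 'Acme', 'industry': 'Robotics'}, {'id': 2, 'name': 'Globex'}]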
028903036ac4bd3bf4a7b91ceda43a6c450f7e20
pipeline_notifier/main.py
pipeline_notifier/main.py
import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': 8080, 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()
import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': int(os.environ.get('PORT', '8080')), 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()
Use port from env if available
Use port from env if available
Python
mit
pimterry/pipeline-notifier
import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': 8080, 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()Use port from env if available
import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': int(os.environ.get('PORT', '8080')), 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()
<commit_before>import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': 8080, 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()<commit_msg>Use port from env if available<commit_after>
import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': int(os.environ.get('PORT', '8080')), 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()
import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': 8080, 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()Use port from env if availableimport os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': int(os.environ.get('PORT', '8080')), 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()
<commit_before>import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': 8080, 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()<commit_msg>Use port from env if available<commit_after>import os import cherrypy from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hello World!' def run_server(): cherrypy.tree.graft(app, '/') cherrypy.config.update({ 'engine.autoreload_on': True, 'log.screen': True, 'server.socket_port': int(os.environ.get('PORT', '8080')), 'server.socket_host': '0.0.0.0' }) cherrypy.engine.start() cherrypy.engine.block() if __name__ == '__main__': run_server()
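The one-line change in this record is the usual twelve-factor port binding: platforms such as Heroku hand the listening port to the process as the PORT environment variable, always as a string, so it is read with a fallback and cast to int. The same idea in isolation:

import os

def listening_port(default=8080):
    # Environment variables are strings; normalise whatever we get to int.
    return int(os.environ.get('PORT', default))

os.environ['PORT'] = '5000'  # simulate what the platform would set
assert listening_port() == 5000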
269474608221e35907896f5f618e69d6e5136388
facepy/exceptions.py
facepy/exceptions.py
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message def _get_message(self): return self._message def _set_message(self, message): self._message = message message = property(_get_message, _set_message)
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message
Remove unnecessary getter and setter
Remove unnecessary getter and setter
Python
mit
merwok-forks/facepy,jwjohns/facepy,jgorset/facepy,Spockuto/facepy,liorshahverdi/facepy,buzzfeed/facepy,jwjohns/facepy
class FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    message = property(_get_message, _set_message)

Remove unnecessary getter and setter
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message
<commit_before>class FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    message = property(_get_message, _set_message)
<commit_msg>Remove unnecessary getter and setter<commit_after>
class FacepyError(Exception): """Base class for exceptions raised by Facepy.""" def __init__(self, message): self.message = message
class FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    message = property(_get_message, _set_message)

Remove unnecessary getter and setterclass FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message

<commit_before>class FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    message = property(_get_message, _set_message)
<commit_msg>Remove unnecessary getter and setter<commit_after>class FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message
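The getter/setter pair deleted in this record was a pure pass-through: a property that only proxies a private attribute adds nothing over plain assignment in __init__, which is why the replacement keeps a bare self.message. A quick check against the record's own class; the message text is invented:

class FacepyError(Exception):
    """Base class for exceptions raised by Facepy."""

    def __init__(self, message):
        self.message = message

err = FacepyError('token expired')
assert err.message == 'token expired'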
23e1868aa9d0c0a6611914b0f648c46d329e00db
genes/gnu_coreutils/commands.py
genes/gnu_coreutils/commands.py
#!/usr/bin/env python from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(): pass @only_posix() def usermod(): pass
#!/usr/bin/env python from typing import Optional, Dict from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(*args): # FIXME: this is a bad way to do things # FIXME: sigh. this is going to be a pain to make it idempotent run(['useradd'] + list(*args)) @only_posix() def usermod(*args): # FIXME: this is a bad way to do things run(['usermod'] + list(*args))
Create some coreutils bindings. They're not good
Create some coreutils bindings. They're not good
Python
mit
hatchery/genepool,hatchery/Genepool2
#!/usr/bin/env python from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(): pass @only_posix() def usermod(): pass Create some coreutils bindings. They're not good
#!/usr/bin/env python from typing import Optional, Dict from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(*args): # FIXME: this is a bad way to do things # FIXME: sigh. this is going to be a pain to make it idempotent run(['useradd'] + list(*args)) @only_posix() def usermod(*args): # FIXME: this is a bad way to do things run(['usermod'] + list(*args))
<commit_before>#!/usr/bin/env python from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(): pass @only_posix() def usermod(): pass <commit_msg>Create some coreutils bindings. They're not good<commit_after>
#!/usr/bin/env python from typing import Optional, Dict from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(*args): # FIXME: this is a bad way to do things # FIXME: sigh. this is going to be a pain to make it idempotent run(['useradd'] + list(*args)) @only_posix() def usermod(*args): # FIXME: this is a bad way to do things run(['usermod'] + list(*args))
#!/usr/bin/env python from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(): pass @only_posix() def usermod(): pass Create some coreutils bindings. They're not good#!/usr/bin/env python from typing import Optional, Dict from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(*args): # FIXME: this is a bad way to do things # FIXME: sigh. this is going to be a pain to make it idempotent run(['useradd'] + list(*args)) @only_posix() def usermod(*args): # FIXME: this is a bad way to do things run(['usermod'] + list(*args))
<commit_before>#!/usr/bin/env python from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(): pass @only_posix() def usermod(): pass <commit_msg>Create some coreutils bindings. They're not good<commit_after>#!/usr/bin/env python from typing import Optional, Dict from genes.process.commands import run from genes.posix.traits import only_posix @only_posix() def chgrp(path, group): run(['chgrp', group, path]) @only_posix() def chown(path, user): run(['chown', user, path]) @only_posix() def mkdir(path, mode=None): if mode: run(['mkdir', '-m', mode, path]) else: run(['mkdir', path]) @only_posix() def useradd(*args): # FIXME: this is a bad way to do things # FIXME: sigh. this is going to be a pain to make it idempotent run(['useradd'] + list(*args)) @only_posix() def usermod(*args): # FIXME: this is a bad way to do things run(['usermod'] + list(*args))
6d8b99b5e4dab49c5a2e90b07f02072c116a7367
robots/models.py
robots/models.py
from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) objects = models.Manager() class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt'])
from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt'])
Remove unnecessary manager declaration from File model
Remove unnecessary manager declaration from File model
Python
isc
trilan/lemon-robots,trilan/lemon-robots
from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) objects = models.Manager() class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt']) Remove unnecessary manager declaration from File model
from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt'])
<commit_before>from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) objects = models.Manager() class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt']) <commit_msg>Remove unnecessary manager declaration from File model<commit_after>
from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt'])
from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) objects = models.Manager() class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt']) Remove unnecessary manager declaration from File modelfrom django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt'])
<commit_before>from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) objects = models.Manager() class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt']) <commit_msg>Remove unnecessary manager declaration from File model<commit_after>from django.db import models from django.contrib.sites.models import Site from django.utils.translation import ugettext_lazy as _ class File(models.Model): site = models.OneToOneField(Site, verbose_name=_(u'site')) content = models.TextField(_(u'file content')) class Meta: verbose_name = _(u'robots.txt file') verbose_name_plural = _(u'robots.txt files') def __unicode__(self): return u'/'.join([self.site.domain, u'robots.txt'])
aae85883bb99ac15f6922506fa64c4492101b602
utils/lit/tests/shared-output.py
utils/lit/tests/shared-output.py
# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, "%{inputs}/shared-output/lit.cfg")' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other
# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other
Fix new test for systems that don't use / as os.path.sep
lit.py: Fix new test for systems that don't use / as os.path.sep git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315773 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm
# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, "%{inputs}/shared-output/lit.cfg")' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other lit.py: Fix new test for systems that don't use / as os.path.sep git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315773 91177308-0d34-0410-b5e6-96231b3b80d8
# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other
<commit_before># RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, "%{inputs}/shared-output/lit.cfg")' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other <commit_msg>lit.py: Fix new test for systems that don't use / as os.path.sep git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315773 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other
# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, "%{inputs}/shared-output/lit.cfg")' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other lit.py: Fix new test for systems that don't use / as os.path.sep git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315773 91177308-0d34-0410-b5e6-96231b3b80d8# RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other
<commit_before># RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, "%{inputs}/shared-output/lit.cfg")' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other <commit_msg>lit.py: Fix new test for systems that don't use / as os.path.sep git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315773 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after># RUN: rm -rf %t && mkdir -p %t # RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg # RUN: %{lit} %t # RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp # RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp # CHECK-DAG: primary # CHECK-DAG: secondary # CHECK-DAG: sub # NEGATIVE-NOT: other # OTHER: other
5867a09fb43f8c4480d7aef89a200e952406a648
dbaas/integrations/credentials/admin/__init__.py
dbaas/integrations/credentials/admin/__init__.py
# -*- coding:utf-8 -*- from django.contrib import admin from .. import models admin.site.register(models.IntegrationType, ) admin.site.register(models.IntegrationCredential, )
# -*- coding:utf-8 -*- from django.contrib import admin from .integration_credential import IntegrationCredentialAdmin from .integration_type import IntegrationTypeAdmin from .. import models admin.site.register(models.IntegrationType, IntegrationTypeAdmin) admin.site.register(models.IntegrationCredential, IntegrationCredentialAdmin)
Enable integration credential and integration type admin
Enable integration credential and integration type admin
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
# -*- coding:utf-8 -*- from django.contrib import admin from .. import models admin.site.register(models.IntegrationType, ) admin.site.register(models.IntegrationCredential, ) Enable integration credential and integration type admin
# -*- coding:utf-8 -*- from django.contrib import admin from .integration_credential import IntegrationCredentialAdmin from .integration_type import IntegrationTypeAdmin from .. import models admin.site.register(models.IntegrationType, IntegrationTypeAdmin) admin.site.register(models.IntegrationCredential, IntegrationCredentialAdmin)
<commit_before># -*- coding:utf-8 -*- from django.contrib import admin from .. import models admin.site.register(models.IntegrationType, ) admin.site.register(models.IntegrationCredential, ) <commit_msg>Enable integration credential and integration type admin<commit_after>
# -*- coding:utf-8 -*- from django.contrib import admin from .integration_credential import IntegrationCredentialAdmin from .integration_type import IntegrationTypeAdmin from .. import models admin.site.register(models.IntegrationType, IntegrationTypeAdmin) admin.site.register(models.IntegrationCredential, IntegrationCredentialAdmin)
# -*- coding:utf-8 -*- from django.contrib import admin from .. import models admin.site.register(models.IntegrationType, ) admin.site.register(models.IntegrationCredential, ) Enable integration credential and integration type admin# -*- coding:utf-8 -*- from django.contrib import admin from .integration_credential import IntegrationCredentialAdmin from .integration_type import IntegrationTypeAdmin from .. import models admin.site.register(models.IntegrationType, IntegrationTypeAdmin) admin.site.register(models.IntegrationCredential, IntegrationCredentialAdmin)
<commit_before># -*- coding:utf-8 -*- from django.contrib import admin from .. import models admin.site.register(models.IntegrationType, ) admin.site.register(models.IntegrationCredential, ) <commit_msg>Enable integration credential and integration type admin<commit_after># -*- coding:utf-8 -*- from django.contrib import admin from .integration_credential import IntegrationCredentialAdmin from .integration_type import IntegrationTypeAdmin from .. import models admin.site.register(models.IntegrationType, IntegrationTypeAdmin) admin.site.register(models.IntegrationCredential, IntegrationCredentialAdmin)
c08c437b22982667e8ed413739147caec6c5d1ca
api/preprints/urls.py
api/preprints/urls.py
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), ]
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), url(r'^(?P<node_id>\w+)/relationships/preprint_provider/$', views.PreprintToPreprintProviderRelationship.as_view(), name=views.PreprintToPreprintProviderRelationship.view_name), ]
Add URL route for updating provider relationship
Add URL route for updating provider relationship
Python
apache-2.0
mluo613/osf.io,rdhyee/osf.io,samchrisinger/osf.io,leb2dg/osf.io,cslzchen/osf.io,chrisseto/osf.io,leb2dg/osf.io,binoculars/osf.io,mluo613/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,emetsger/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,icereval/osf.io,binoculars/osf.io,cslzchen/osf.io,caneruguz/osf.io,samchrisinger/osf.io,baylee-d/osf.io,TomBaxter/osf.io,crcresearch/osf.io,icereval/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,cslzchen/osf.io,mfraezz/osf.io,mattclark/osf.io,cwisecarver/osf.io,chennan47/osf.io,aaxelb/osf.io,erinspace/osf.io,emetsger/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,saradbowman/osf.io,mfraezz/osf.io,erinspace/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,chennan47/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,hmoco/osf.io,pattisdr/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,acshi/osf.io,sloria/osf.io,mluo613/osf.io,Nesiehr/osf.io,alexschiller/osf.io,aaxelb/osf.io,TomBaxter/osf.io,mluo613/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,pattisdr/osf.io,leb2dg/osf.io,adlius/osf.io,caseyrollins/osf.io,chrisseto/osf.io,mfraezz/osf.io,caseyrollins/osf.io,crcresearch/osf.io,alexschiller/osf.io,felliott/osf.io,alexschiller/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,adlius/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,rdhyee/osf.io,hmoco/osf.io,baylee-d/osf.io,cwisecarver/osf.io,mattclark/osf.io,cslzchen/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,felliott/osf.io,caneruguz/osf.io,Nesiehr/osf.io,acshi/osf.io,mattclark/osf.io,felliott/osf.io,TomBaxter/osf.io,crcresearch/osf.io,acshi/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,adlius/osf.io,mfraezz/osf.io,acshi/osf.io,emetsger/osf.io,sloria/osf.io,laurenrevere/osf.io,felliott/osf.io,chrisseto/osf.io,chrisseto/osf.io,aaxelb/osf.io,adlius/osf.io,icereval/osf.io,erinspace/osf.io,hmoco/osf.io,cwisecarver/osf.io,mluo613/osf.io,pattisdr/osf.io
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), ] Add URL route for updating provider relationship
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), url(r'^(?P<node_id>\w+)/relationships/preprint_provider/$', views.PreprintToPreprintProviderRelationship.as_view(), name=views.PreprintToPreprintProviderRelationship.view_name), ]
<commit_before>from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), ] <commit_msg>Add URL route for updating provider relationship<commit_after>
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), url(r'^(?P<node_id>\w+)/relationships/preprint_provider/$', views.PreprintToPreprintProviderRelationship.as_view(), name=views.PreprintToPreprintProviderRelationship.view_name), ]
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), ] Add URL route for updating provider relationshipfrom django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), url(r'^(?P<node_id>\w+)/relationships/preprint_provider/$', views.PreprintToPreprintProviderRelationship.as_view(), name=views.PreprintToPreprintProviderRelationship.view_name), ]
<commit_before>from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), ] <commit_msg>Add URL route for updating provider relationship<commit_after>from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name), url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name), url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name), url(r'^(?P<node_id>\w+)/relationships/preprint_provider/$', views.PreprintToPreprintProviderRelationship.as_view(), name=views.PreprintToPreprintProviderRelationship.view_name), ]
597f586d2cf42f31a0179efc7ac8441f33b3d637
lib/mysql.py
lib/mysql.py
import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor()
import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._port = port self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor()
Define the port variable for reconnection
Define the port variable for reconnection
Python
mit
ImShady/Tubey
import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor() Define the port variable for reconnection
import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._port = port self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor()
<commit_before>import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor() <commit_msg>Define the port variable for reconnection<commit_after>
import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._port = port self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor()
import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor() Define the port variable for reconnectionimport pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._port = port self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor()
<commit_before>import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor() <commit_msg>Define the port variable for reconnection<commit_after>import pymysql class MySQL(): def __init__(self, host, user, password, port): self._host = host self._user = user self._password = password self._port = port self._conn = pymysql.connect(host=host, port=port, user=user, passwd=password) self._cursor = self._conn.cursor() def execute(self, query): try: self._cursor.execute(query=query) except (AttributeError, pymysql.OperationalError): self.__reconnect__() self._cursor.execute(query=query) def fetchone(self): return self._cursor.fetchone() def commit(self): return self._conn.commit() def __reconnect__(self): self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password) self._cursor = self._conn.cursor()
999752ec378bbf6d3017f7afc964090c6871b7d4
app/user_administration/tests.py
app/user_administration/tests.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) )
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from django.contrib.auth.models import User from .models import Clients class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) class LoginSetup(TestCase): def setUp(self): self.user = User.objects.create(username='testUser', is_active=True, is_superuser=True) self.user.set_password('RHChallenge') self.user.save() self.client.force_login(self.user) class ClientsViewTest(LoginSetup): def setUp(self): super(ClientsViewTest, self).setUp() self.custom_client = Clients.objects.create(first_name='RH', last_name='CH', iban='IBAN') def test_client_create(self): data = {'first_name': 'Rexhep', 'last_name': 'Berlajolli', 'iban': 'XK051506001004471930'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 2, msg="Create client failed, received {0} clients instead of 2".format(clients_count) ) def test_client_create_validation(self): data = {'first_name': 'Invalid', 'last_name': 'Data', 'iban': 'INVALID_IBAN'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 1, msg="Insertion of invalid data succeeded, received {0} clients instead of 1".format(clients_count) ) def test_get_clients(self): response = self.client.get('/') clients = response.context_data['clients'] self.assertEqual( list(clients), list(Clients.objects.all()), msg="Get clients failed, received clients {0} instead of {1}".format(clients, [self.custom_client]) )
Add TestCase for ClientListView and ClientCreateView
Add TestCase for ClientListView and ClientCreateView
Python
mit
rexhepberlajolli/RHChallenge,rexhepberlajolli/RHChallenge
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) Add TestCase for ClientListView and ClientCreateView
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from django.contrib.auth.models import User from .models import Clients class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) class LoginSetup(TestCase): def setUp(self): self.user = User.objects.create(username='testUser', is_active=True, is_superuser=True) self.user.set_password('RHChallenge') self.user.save() self.client.force_login(self.user) class ClientsViewTest(LoginSetup): def setUp(self): super(ClientsViewTest, self).setUp() self.custom_client = Clients.objects.create(first_name='RH', last_name='CH', iban='IBAN') def test_client_create(self): data = {'first_name': 'Rexhep', 'last_name': 'Berlajolli', 'iban': 'XK051506001004471930'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 2, msg="Create client failed, received {0} clients instead of 2".format(clients_count) ) def test_client_create_validation(self): data = {'first_name': 'Invalid', 'last_name': 'Data', 'iban': 'INVALID_IBAN'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 1, msg="Insertion of invalid data succeeded, received {0} clients instead of 1".format(clients_count) ) def test_get_clients(self): response = self.client.get('/') clients = response.context_data['clients'] self.assertEqual( list(clients), list(Clients.objects.all()), msg="Get clients failed, received clients {0} instead of {1}".format(clients, [self.custom_client]) )
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) <commit_msg>Add TestCase for ClientListView and ClientCreateView<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from django.contrib.auth.models import User from .models import Clients class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) class LoginSetup(TestCase): def setUp(self): self.user = User.objects.create(username='testUser', is_active=True, is_superuser=True) self.user.set_password('RHChallenge') self.user.save() self.client.force_login(self.user) class ClientsViewTest(LoginSetup): def setUp(self): super(ClientsViewTest, self).setUp() self.custom_client = Clients.objects.create(first_name='RH', last_name='CH', iban='IBAN') def test_client_create(self): data = {'first_name': 'Rexhep', 'last_name': 'Berlajolli', 'iban': 'XK051506001004471930'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 2, msg="Create client failed, received {0} clients instead of 2".format(clients_count) ) def test_client_create_validation(self): data = {'first_name': 'Invalid', 'last_name': 'Data', 'iban': 'INVALID_IBAN'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 1, msg="Insertion of invalid data succeeded, received {0} clients instead of 1".format(clients_count) ) def test_get_clients(self): response = self.client.get('/') clients = response.context_data['clients'] self.assertEqual( list(clients), list(Clients.objects.all()), msg="Get clients failed, received clients {0} instead of {1}".format(clients, [self.custom_client]) )
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) Add TestCase for ClientListView and ClientCreateView# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from django.contrib.auth.models import User from .models import Clients class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) class LoginSetup(TestCase): def setUp(self): self.user = User.objects.create(username='testUser', is_active=True, is_superuser=True) self.user.set_password('RHChallenge') self.user.save() self.client.force_login(self.user) class ClientsViewTest(LoginSetup): def setUp(self): super(ClientsViewTest, self).setUp() self.custom_client = Clients.objects.create(first_name='RH', last_name='CH', iban='IBAN') def test_client_create(self): data = {'first_name': 'Rexhep', 'last_name': 'Berlajolli', 'iban': 'XK051506001004471930'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 2, msg="Create client failed, received {0} clients instead of 2".format(clients_count) ) def test_client_create_validation(self): data = {'first_name': 'Invalid', 'last_name': 'Data', 'iban': 'INVALID_IBAN'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 1, msg="Insertion of invalid data succeeded, received {0} clients instead of 1".format(clients_count) ) def test_get_clients(self): response = self.client.get('/') clients = response.context_data['clients'] self.assertEqual( list(clients), list(Clients.objects.all()), msg="Get clients failed, received clients {0} instead of {1}".format(clients, [self.custom_client]) )
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) <commit_msg>Add TestCase for ClientListView and ClientCreateView<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from django.contrib.auth.models import User from .models import Clients class LoginRequiredTest(TestCase): def test_login_required(self): response = self.client.get('/') self.assertEqual( response.status_code, 302, msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code) ) self.assertEqual( response.url, '/login?next=/', msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url) ) class LoginSetup(TestCase): def setUp(self): self.user = User.objects.create(username='testUser', is_active=True, is_superuser=True) self.user.set_password('RHChallenge') self.user.save() self.client.force_login(self.user) class ClientsViewTest(LoginSetup): def setUp(self): super(ClientsViewTest, self).setUp() self.custom_client = Clients.objects.create(first_name='RH', last_name='CH', iban='IBAN') def test_client_create(self): data = {'first_name': 'Rexhep', 'last_name': 'Berlajolli', 'iban': 'XK051506001004471930'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 2, msg="Create client failed, received {0} clients instead of 2".format(clients_count) ) def test_client_create_validation(self): data = {'first_name': 'Invalid', 'last_name': 'Data', 'iban': 'INVALID_IBAN'} self.client.post('/add', data=data) clients_count = Clients.objects.count() self.assertEqual( clients_count, 1, msg="Insertion of invalid data succeeded, received {0} clients instead of 1".format(clients_count) ) def test_get_clients(self): response = self.client.get('/') clients = response.context_data['clients'] self.assertEqual( list(clients), list(Clients.objects.all()), msg="Get clients failed, received clients {0} instead of {1}".format(clients, [self.custom_client]) )
912c8f8f3626c7da92b6864e02dfc2534f4f7873
exercises/leap/leap_test.py
exercises/leap/leap_test.py
import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main()
import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def test_year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main()
Stop leap test being ignored
Stop leap test being ignored
Python
mit
N-Parsons/exercism-python,exercism/python,behrtam/xpython,exercism/xpython,jmluy/xpython,smalley/python,exercism/xpython,behrtam/xpython,N-Parsons/exercism-python,jmluy/xpython,exercism/python,smalley/python
import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main() Stop leap test being ignored
import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def test_year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main()
<commit_before>import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main() <commit_msg>Stop leap test being ignored<commit_after>
import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def test_year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main()
import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main() Stop leap test being ignoredimport unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def test_year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main()
<commit_before>import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main() <commit_msg>Stop leap test being ignored<commit_after>import unittest from leap import is_leap_year # Tests adapted from `problem-specifications//canonical-data.json` @ v1.5.1 class LeapTest(unittest.TestCase): def test_year_not_divisible_by_4(self): self.assertIs(is_leap_year(2015), False) def test_year_divisible_by_2_not_divisible_by_4(self): self.assertIs(is_leap_year(1970), False) def test_year_divisible_by_4_not_divisible_by_100(self): self.assertIs(is_leap_year(1996), True) def test_year_divisible_by_100_not_divisible_by_400(self): self.assertIs(is_leap_year(2100), False) def test_year_divisible_by_400(self): self.assertIs(is_leap_year(2000), True) def test_year_divisible_by_200_not_divisible_by_400(self): self.assertIs(is_leap_year(1800), False) if __name__ == '__main__': unittest.main()
8b07dde78e753f6dce663481a68856024ed2fc49
plutokore/__init__.py
plutokore/__init__.py
from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', ]
from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io from . import configuration __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', 'configuration', ]
Add configuration module to package exports
Add configuration module to package exports
Python
mit
opcon/plutokore,opcon/plutokore
from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', ] Add configuration module to package exports
from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io from . import configuration __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', 'configuration', ]
<commit_before>from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', ] <commit_msg>Add configuration module to package exports<commit_after>
from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io from . import configuration __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', 'configuration', ]
from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', ] Add configuration module to package exportsfrom .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io from . import configuration __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', 'configuration', ]
<commit_before>from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', ] <commit_msg>Add configuration module to package exports<commit_after>from .environments.makino import MakinoProfile from .environments.king import KingProfile from .jet import AstroJet from . import luminosity from . import plotting from . import simulations from . import helpers from . import io from . import configuration __all__ = [ 'environments', 'luminosity', 'plotting', 'simulations', 'jet', 'helpers', 'io', 'configuration', ]
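A quick usage sketch of what this commit changes — hypothetical, since the contents of the configuration module itself are not shown in this row:

# After this commit the submodule is importable from the package root
# and listed in the public API; whatever plutokore.configuration
# exposes internally is an assumption not shown here.
import plutokore
from plutokore import configuration

assert 'configuration' in plutokore.__all__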
fcd2328549dcec2986e3b972f1a8bcfb0cf2e21b
rst2pdf/utils.py
rst2pdf/utils.py
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$

import shlex
from reportlab.platypus import Spacer
from flowables import *

def parseRaw(data):
    """Parse and process a simple DSL to handle creation of flowables.

    Supported (can add others on request):

    * PageBreak

    * Spacer width, height

    """
    elements = []
    lines = data.splitlines()
    for line in lines:
        lexer = shlex.shlex(line)
        lexer.whitespace += ','
        tokens = list(lexer)
        command = tokens[0]
        if command == 'PageBreak':
            if len(tokens) == 1:
                elements.append(MyPageBreak())
            else:
                elements.append(MyPageBreak(tokens[1]))
        if command == 'Spacer':
            elements.append(Spacer(int(tokens[1]), int(tokens[2])))
        if command == 'Transition':
            elements.append(Transition(*tokens[1:]))
    return elements

# Looks like this is not used anywhere now:
# def depth(node):
#     if node.parent == None:
#         return 0
#     else:
#         return 1 + depth(node.parent)
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$

import shlex
from reportlab.platypus import Spacer
from flowables import *
from styles import adjustUnits

def parseRaw(data):
    """Parse and process a simple DSL to handle creation of flowables.

    Supported (can add others on request):

    * PageBreak

    * Spacer width, height

    """
    elements = []
    lines = data.splitlines()
    for line in lines:
        lexer = shlex.shlex(line)
        lexer.whitespace += ','
        tokens = list(lexer)
        command = tokens[0]
        if command == 'PageBreak':
            if len(tokens) == 1:
                elements.append(MyPageBreak())
            else:
                elements.append(MyPageBreak(tokens[1]))
        if command == 'Spacer':
            elements.append(Spacer(adjustUnits(tokens[1]), adjustUnits(tokens[2])))
        if command == 'Transition':
            elements.append(Transition(*tokens[1:]))
    return elements

# Looks like this is not used anywhere now:
# def depth(node):
#     if node.parent == None:
#         return 0
#     else:
#         return 1 + depth(node.parent)
Add unit support for spacers
Add unit support for spacers
Python
mit
pombreda/rst2pdf,liuyi1112/rst2pdf,pombreda/rst2pdf,liuyi1112/rst2pdf,rst2pdf/rst2pdf,rst2pdf/rst2pdf
# -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(int(tokens[1]), int(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent) Add unit support for spacers
# -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * from styles import adjustUnits def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(adjustUnits(tokens[1]), adjustUnits(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent)
<commit_before># -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(int(tokens[1]), int(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent) <commit_msg>Add unit support for spacers<commit_after>
# -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * from styles import adjustUnits def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(adjustUnits(tokens[1]), adjustUnits(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent)
# -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(int(tokens[1]), int(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent) Add unit support for spacers# -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * from styles import adjustUnits def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(adjustUnits(tokens[1]), adjustUnits(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent)
<commit_before># -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(int(tokens[1]), int(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent) <commit_msg>Add unit support for spacers<commit_after># -*- coding: utf-8 -*- # See LICENSE.txt for licensing terms #$HeadURL$ #$LastChangedDate$ #$LastChangedRevision$ import shlex from reportlab.platypus import Spacer from flowables import * from styles import adjustUnits def parseRaw(data): """Parse and process a simple DSL to handle creation of flowables. Supported (can add others on request): * PageBreak * Spacer width, height """ elements = [] lines = data.splitlines() for line in lines: lexer = shlex.shlex(line) lexer.whitespace += ',' tokens = list(lexer) command = tokens[0] if command == 'PageBreak': if len(tokens) == 1: elements.append(MyPageBreak()) else: elements.append(MyPageBreak(tokens[1])) if command == 'Spacer': elements.append(Spacer(adjustUnits(tokens[1]), adjustUnits(tokens[2]))) if command == 'Transition': elements.append(Transition(*tokens[1:])) return elements # Looks like this is not used anywhere now: # def depth(node): # if node.parent == None: # return 0 # else: # return 1 + depth(node.parent)
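What the change enables, sketched: Spacer arguments in the raw DSL may now carry unit suffixes instead of bare point integers. This assumes adjustUnits accepts strings such as '1cm' (its grammar lives in styles.py, which this row does not show) and that rst2pdf's Python-2-era modules are importable:

# Hypothetical DSL line accepted after this commit:
#   Spacer 1cm, 2cm
# Previously int('1cm') raised ValueError; now adjustUnits converts
# each token to points before the reportlab Spacer is built.
from rst2pdf.utils import parseRaw

flowables = parseRaw('Spacer 1cm, 2cm')
print(flowables)  # one Spacer flowable, sized in points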
6b0167514bb41f877945b408638fab72873f2da8
postgres_copy/__init__.py
postgres_copy/__init__.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'


class CopyQuerySet(models.QuerySet):
    """
    Subclass of QuerySet that adds from_csv and to_csv methods.
    """
    def from_csv(self, csv_path, mapping, **kwargs):
        """
        Copy CSV file from the provided path to the current model
        using the provided mapping.
        """
        mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
        mapping.save(silent=True)

    def to_csv(self, csv_path, *fields):
        """
        Copy current QuerySet to CSV at provided path.
        """
        query = self.query.clone(CopyToQuery)
        query.copy_to_fields = fields
        compiler = query.get_compiler(self.db, connection=connection)
        compiler.execute_sql(csv_path)


CopyManager = models.Manager.from_queryset(CopyQuerySet)


__all__ = (
    'CopyMapping',
    'SQLCopyToCompiler',
    'CopyToQuery',
    'CopyManager',
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'


class CopyQuerySet(models.QuerySet):
    """
    Subclass of QuerySet that adds from_csv and to_csv methods.
    """
    def from_csv(self, csv_path, mapping, **kwargs):
        """
        Copy CSV file from the provided path to the current model
        using the provided mapping.
        """
        mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
        mapping.save(silent=True)

    def to_csv(self, csv_path, *fields):
        """
        Copy current QuerySet to CSV at provided path.
        """
        query = self.query.clone(CopyToQuery)
        query.copy_to_fields = fields
        compiler = query.get_compiler(self.db, connection=connection)
        compiler.execute_sql(csv_path)


CopyManager = models.Manager.from_queryset(CopyQuerySet)


__all__ = (
    'CopyManager',
    'CopyMapping',
    'CopyToQuery',
    'CopyToQuerySet',
    'SQLCopyToCompiler',
)
Add CopyToQuerySet to available imports
Add CopyToQuerySet to available imports
Python
mit
california-civic-data-coalition/django-postgres-copy
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyMapping', 'SQLCopyToCompiler', 'CopyToQuery', 'CopyManager', ) Add CopyToQuerySet to available imports
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyManager', 'CopyMapping', 'CopyToQuery', 'CopyToQuerySet', 'SQLCopyToCompiler', )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyMapping', 'SQLCopyToCompiler', 'CopyToQuery', 'CopyManager', ) <commit_msg>Add CopyToQuerySet to available imports<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyManager', 'CopyMapping', 'CopyToQuery', 'CopyToQuerySet', 'SQLCopyToCompiler', )
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyMapping', 'SQLCopyToCompiler', 'CopyToQuery', 'CopyManager', ) Add CopyToQuerySet to available imports#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyManager', 'CopyMapping', 'CopyToQuery', 'CopyToQuerySet', 'SQLCopyToCompiler', )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyMapping', 'SQLCopyToCompiler', 'CopyToQuery', 'CopyManager', ) <commit_msg>Add CopyToQuerySet to available imports<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db import models from django.db import connection from .copy_from import CopyMapping from .copy_to import SQLCopyToCompiler, CopyToQuery __version__ = '2.0.0' class CopyQuerySet(models.QuerySet): """ Subclass of QuerySet that adds from_csv and to_csv methods. """ def from_csv(self, csv_path, mapping, **kwargs): """ Copy CSV file from the provided path to the current model using the provided mapping. """ mapping = CopyMapping(self.model, csv_path, mapping, **kwargs) mapping.save(silent=True) def to_csv(self, csv_path, *fields): """ Copy current QuerySet to CSV at provided path. """ query = self.query.clone(CopyToQuery) query.copy_to_fields = fields compiler = query.get_compiler(self.db, connection=connection) compiler.execute_sql(csv_path) CopyManager = models.Manager.from_queryset(CopyQuerySet) __all__ = ( 'CopyManager', 'CopyMapping', 'CopyToQuery', 'CopyToQuerySet', 'SQLCopyToCompiler', )
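Usage sketch for the exported manager — the model and CSV mapping below are illustrative, not part of this row. (Note that __all__ here lists 'CopyToQuerySet' although the class defined above is CopyQuerySet, so a star-import of that name would fail.)

# Hypothetical Django model wiring up CopyManager; the field name and
# CSV column mapping are placeholders.
from django.db import models
from postgres_copy import CopyManager

class Person(models.Model):
    name = models.CharField(max_length=100)
    objects = CopyManager()

# Person.objects.from_csv('people.csv', dict(name='NAME'))
# Person.objects.to_csv('export.csv', 'name')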
639032215f7a51ca146810e8261448f4d0a318aa
downstream_node/models.py
downstream_node/models.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from downstream_node.startup import db


class Files(db.Model):
    __tablename__ = 'files'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    filepath = db.Column('filepath', db.String())


class Challenges(db.Model):
    __tablename__ = 'challenges'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    filepath = db.Column(db.ForeignKey('files.filepath'))
    block = db.Column('block', db.String())
    seed = db.Column('seed', db.String())
    response = db.Column('response', db.String(), nullable=True)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from downstream_node.startup import db


class Files(db.Model):
    __tablename__ = 'files'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    filepath = db.Column('filepath', db.String())


class Challenges(db.Model):
    __tablename__ = 'challenges'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    filepath = db.Column(db.ForeignKey('files.filepath'))
    root_seed = db.Column(db.String())
    block = db.Column(db.String())
    seed = db.Column(db.String())
    response = db.Column(db.String(), nullable=True)
Add root seed to model
Add root seed to model
Python
mit
Storj/downstream-node,Storj/downstream-node
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) block = db.Column('block', db.String()) seed = db.Column('seed', db.String()) response = db.Column('response', db.String(), nullable=True) Add root seed to model
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) root_seed = db.Column(db.String()) block = db.Column(db.String()) seed = db.Column(db.String()) response = db.Column(db.String(), nullable=True)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) block = db.Column('block', db.String()) seed = db.Column('seed', db.String()) response = db.Column('response', db.String(), nullable=True) <commit_msg>Add root seed to model<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) root_seed = db.Column(db.String()) block = db.Column(db.String()) seed = db.Column(db.String()) response = db.Column(db.String(), nullable=True)
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) block = db.Column('block', db.String()) seed = db.Column('seed', db.String()) response = db.Column('response', db.String(), nullable=True) Add root seed to model#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) root_seed = db.Column(db.String()) block = db.Column(db.String()) seed = db.Column(db.String()) response = db.Column(db.String(), nullable=True)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) block = db.Column('block', db.String()) seed = db.Column('seed', db.String()) response = db.Column('response', db.String(), nullable=True) <commit_msg>Add root seed to model<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.startup import db class Files(db.Model): __tablename__ = 'files' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column('filepath', db.String()) class Challenges(db.Model): __tablename__ = 'challenges' id = db.Column(db.Integer(), primary_key=True, autoincrement=True) filepath = db.Column(db.ForeignKey('files.filepath')) root_seed = db.Column(db.String()) block = db.Column(db.String()) seed = db.Column(db.String()) response = db.Column(db.String(), nullable=True)
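An illustrative insert exercising the new column — all values are placeholders and a configured Flask-SQLAlchemy session is assumed:

# root_seed is the column added by this commit; the literals below are
# hypothetical.
from downstream_node.models import Challenges
from downstream_node.startup import db

chal = Challenges(filepath='/tmp/test.bin', root_seed='deadbeef',
                  block='0', seed='cafebabe', response=None)
db.session.add(chal)
db.session.commit()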
c11fd9f792afb71e01224f149121bc13a6a9bed8
scripts/utils.py
scripts/utils.py
#!/usr/bin/env python3

# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""

import json
import os

json_dump_params = {
    'ensure_ascii': False,
    'indent': '\t',
    'separators': (',', ': '),
    'sort_keys': True
}
# Default parameters for JSON input and output

def json_load(fn):
    with open(fn, 'r', encoding='utf-8') as file:
        return json.load(file)

def json_store(fn, obj, dirs=['']):
    """Saves the JSON object [obj] to [fn], creating all necessary
    directories in the process. If [dirs] is given, the function is
    executed for every root directory in the array."""
    for i in dirs:
        full_fn = os.path.join(i, fn)
        os.makedirs(os.path.dirname(full_fn), exist_ok=True)
        with open(full_fn, 'w', encoding='utf-8') as file:
            json.dump(obj, file, **json_dump_params)
            file.write('\n')
#!/usr/bin/env python3

# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""

from collections import OrderedDict
import json
import os

json_load_params = {
    'object_pairs_hook': OrderedDict
}

json_dump_params = {
    'ensure_ascii': False,
    'indent': '\t',
    'separators': (',', ': '),
    'sort_keys': True
}
# Default parameters for JSON input and output

def json_load(fn):
    with open(fn, 'r', encoding='utf-8') as file:
        return json.load(file, **json_load_params)

def json_store(fn, obj, dirs=['']):
    """Saves the JSON object [obj] to [fn], creating all necessary
    directories in the process. If [dirs] is given, the function is
    executed for every root directory in the array."""
    for i in dirs:
        full_fn = os.path.join(i, fn)
        os.makedirs(os.path.dirname(full_fn), exist_ok=True)
        with open(full_fn, 'w', encoding='utf-8') as file:
            json.dump(obj, file, **json_dump_params)
            file.write('\n')
Use the OrderedDict class for JSON objects.
scripts: Use the OrderedDict class for JSON objects.
Python
unlicense
thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap
#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" import json import os json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n') scripts: Use the OrderedDict class for JSON objects.
#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" from collections import OrderedDict import json import os json_load_params = { 'object_pairs_hook': OrderedDict } json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file, **json_load_params) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n')
<commit_before>#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" import json import os json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n') <commit_msg>scripts: Use the OrderedDict class for JSON objects.<commit_after>
#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" from collections import OrderedDict import json import os json_load_params = { 'object_pairs_hook': OrderedDict } json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file, **json_load_params) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n')
#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" import json import os json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n') scripts: Use the OrderedDict class for JSON objects.#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" from collections import OrderedDict import json import os json_load_params = { 'object_pairs_hook': OrderedDict } json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file, **json_load_params) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n')
<commit_before>#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" import json import os json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n') <commit_msg>scripts: Use the OrderedDict class for JSON objects.<commit_after>#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" from collections import OrderedDict import json import os json_load_params = { 'object_pairs_hook': OrderedDict } json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn): with open(fn, 'r', encoding='utf-8') as file: return json.load(file, **json_load_params) def json_store(fn, obj, dirs=['']): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_dump_params) file.write('\n')
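What object_pairs_hook buys, in a self-contained sketch: keys come back in file order while the scripts process the data (on the Python 3 versions of the era, plain dicts did not guarantee this):

# Stand-alone demonstration of the object_pairs_hook pattern used above.
import json
from collections import OrderedDict

raw = '{"b": 1, "a": 2}'
ordered = json.loads(raw, object_pairs_hook=OrderedDict)
print(list(ordered))  # ['b', 'a'] -- file order preserved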
0e22a642526612ff9d19d1b421a1aacea4109f15
pylearn2/datasets/hdf5.py
pylearn2/datasets/hdf5.py
"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, key): with h5py.File(filename) as f: data = f[key][:] if data.ndim == 2: super(HDF5Dataset, self).__init__(X=data) else: super(HDF5Dataset, self).__init__(topo_view=data)
"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, X=None, topo_view=None, y=None, **kwargs): """ Loads data and labels from HDF5 file. Parameters ---------- filename: str HDF5 file name. X: str Key into HDF5 file for dataset design matrix. topo_view: str Key into HDF5 file for topological view of dataset. y: str Key into HDF5 file for dataset targets. """ with h5py.File(filename) as f: if X is not None: X = f[X][:] if topo_view is not None: topo_view = f[topo_view][:] if y is not None: y = f[y][:] super(HDF5Dataset, self).__init__(X, topo_view, y, **kwargs)
Support for targets in HDF5 datasets
Support for targets in HDF5 datasets
Python
bsd-3-clause
alexjc/pylearn2,pombredanne/pylearn2,kose-y/pylearn2,TNick/pylearn2,lancezlin/pylearn2,theoryno3/pylearn2,aalmah/pylearn2,se4u/pylearn2,woozzu/pylearn2,daemonmaker/pylearn2,daemonmaker/pylearn2,goodfeli/pylearn2,JesseLivezey/pylearn2,nouiz/pylearn2,ddboline/pylearn2,CIFASIS/pylearn2,w1kke/pylearn2,lamblin/pylearn2,kastnerkyle/pylearn2,KennethPierce/pylearnk,mclaughlin6464/pylearn2,woozzu/pylearn2,woozzu/pylearn2,hantek/pylearn2,mkraemer67/pylearn2,cosmoharrigan/pylearn2,sandeepkbhat/pylearn2,fulmicoton/pylearn2,skearnes/pylearn2,ddboline/pylearn2,daemonmaker/pylearn2,fulmicoton/pylearn2,KennethPierce/pylearnk,jamessergeant/pylearn2,nouiz/pylearn2,mclaughlin6464/pylearn2,caidongyun/pylearn2,lamblin/pylearn2,caidongyun/pylearn2,ashhher3/pylearn2,sandeepkbhat/pylearn2,chrish42/pylearn,pkainz/pylearn2,se4u/pylearn2,se4u/pylearn2,goodfeli/pylearn2,Refefer/pylearn2,lunyang/pylearn2,msingh172/pylearn2,jeremyfix/pylearn2,junbochen/pylearn2,shiquanwang/pylearn2,matrogers/pylearn2,KennethPierce/pylearnk,w1kke/pylearn2,w1kke/pylearn2,shiquanwang/pylearn2,kastnerkyle/pylearn2,cosmoharrigan/pylearn2,jeremyfix/pylearn2,chrish42/pylearn,skearnes/pylearn2,nouiz/pylearn2,ashhher3/pylearn2,sandeepkbhat/pylearn2,lancezlin/pylearn2,aalmah/pylearn2,jamessergeant/pylearn2,pombredanne/pylearn2,lancezlin/pylearn2,TNick/pylearn2,ddboline/pylearn2,theoryno3/pylearn2,fishcorn/pylearn2,chrish42/pylearn,bartvm/pylearn2,chrish42/pylearn,hyqneuron/pylearn2-maxsom,aalmah/pylearn2,fishcorn/pylearn2,fulmicoton/pylearn2,hantek/pylearn2,mclaughlin6464/pylearn2,woozzu/pylearn2,JesseLivezey/plankton,pkainz/pylearn2,lisa-lab/pylearn2,pkainz/pylearn2,lisa-lab/pylearn2,jamessergeant/pylearn2,pkainz/pylearn2,JesseLivezey/plankton,sandeepkbhat/pylearn2,mkraemer67/pylearn2,KennethPierce/pylearnk,Refefer/pylearn2,lunyang/pylearn2,caidongyun/pylearn2,alexjc/pylearn2,bartvm/pylearn2,bartvm/pylearn2,lancezlin/pylearn2,JesseLivezey/plankton,jamessergeant/pylearn2,shiquanwang/pylearn2,lisa-lab/pylearn2,msingh172/pylearn2,kastnerkyle/pylearn2,goodfeli/pylearn2,goodfeli/pylearn2,fyffyt/pylearn2,mkraemer67/pylearn2,cosmoharrigan/pylearn2,msingh172/pylearn2,kastnerkyle/pylearn2,JesseLivezey/pylearn2,mclaughlin6464/pylearn2,fishcorn/pylearn2,jeremyfix/pylearn2,caidongyun/pylearn2,hyqneuron/pylearn2-maxsom,hyqneuron/pylearn2-maxsom,matrogers/pylearn2,theoryno3/pylearn2,CIFASIS/pylearn2,hantek/pylearn2,junbochen/pylearn2,Refefer/pylearn2,matrogers/pylearn2,JesseLivezey/plankton,ashhher3/pylearn2,Refefer/pylearn2,cosmoharrigan/pylearn2,fyffyt/pylearn2,junbochen/pylearn2,abergeron/pylearn2,fishcorn/pylearn2,abergeron/pylearn2,abergeron/pylearn2,skearnes/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,lunyang/pylearn2,ashhher3/pylearn2,aalmah/pylearn2,shiquanwang/pylearn2,junbochen/pylearn2,lamblin/pylearn2,matrogers/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,fyffyt/pylearn2,lisa-lab/pylearn2,hyqneuron/pylearn2-maxsom,alexjc/pylearn2,daemonmaker/pylearn2,fulmicoton/pylearn2,TNick/pylearn2,skearnes/pylearn2,kose-y/pylearn2,hantek/pylearn2,kose-y/pylearn2,fyffyt/pylearn2,nouiz/pylearn2,CIFASIS/pylearn2,w1kke/pylearn2,bartvm/pylearn2,pombredanne/pylearn2,lamblin/pylearn2,theoryno3/pylearn2,abergeron/pylearn2,alexjc/pylearn2,ddboline/pylearn2,JesseLivezey/pylearn2,TNick/pylearn2,kose-y/pylearn2,se4u/pylearn2,JesseLivezey/pylearn2,pombredanne/pylearn2,jeremyfix/pylearn2
"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, key): with h5py.File(filename) as f: data = f[key][:] if data.ndim == 2: super(HDF5Dataset, self).__init__(X=data) else: super(HDF5Dataset, self).__init__(topo_view=data) Support for targets in HDF5 datasets
"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, X=None, topo_view=None, y=None, **kwargs): """ Loads data and labels from HDF5 file. Parameters ---------- filename: str HDF5 file name. X: str Key into HDF5 file for dataset design matrix. topo_view: str Key into HDF5 file for topological view of dataset. y: str Key into HDF5 file for dataset targets. """ with h5py.File(filename) as f: if X is not None: X = f[X][:] if topo_view is not None: topo_view = f[topo_view][:] if y is not None: y = f[y][:] super(HDF5Dataset, self).__init__(X, topo_view, y, **kwargs)
<commit_before>"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, key): with h5py.File(filename) as f: data = f[key][:] if data.ndim == 2: super(HDF5Dataset, self).__init__(X=data) else: super(HDF5Dataset, self).__init__(topo_view=data) <commit_msg>Support for targets in HDF5 datasets<commit_after>
"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, X=None, topo_view=None, y=None, **kwargs): """ Loads data and labels from HDF5 file. Parameters ---------- filename: str HDF5 file name. X: str Key into HDF5 file for dataset design matrix. topo_view: str Key into HDF5 file for topological view of dataset. y: str Key into HDF5 file for dataset targets. """ with h5py.File(filename) as f: if X is not None: X = f[X][:] if topo_view is not None: topo_view = f[topo_view][:] if y is not None: y = f[y][:] super(HDF5Dataset, self).__init__(X, topo_view, y, **kwargs)
"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, key): with h5py.File(filename) as f: data = f[key][:] if data.ndim == 2: super(HDF5Dataset, self).__init__(X=data) else: super(HDF5Dataset, self).__init__(topo_view=data) Support for targets in HDF5 datasets"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, X=None, topo_view=None, y=None, **kwargs): """ Loads data and labels from HDF5 file. Parameters ---------- filename: str HDF5 file name. X: str Key into HDF5 file for dataset design matrix. topo_view: str Key into HDF5 file for topological view of dataset. y: str Key into HDF5 file for dataset targets. """ with h5py.File(filename) as f: if X is not None: X = f[X][:] if topo_view is not None: topo_view = f[topo_view][:] if y is not None: y = f[y][:] super(HDF5Dataset, self).__init__(X, topo_view, y, **kwargs)
<commit_before>"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, key): with h5py.File(filename) as f: data = f[key][:] if data.ndim == 2: super(HDF5Dataset, self).__init__(X=data) else: super(HDF5Dataset, self).__init__(topo_view=data) <commit_msg>Support for targets in HDF5 datasets<commit_after>"""Objects for datasets serialized in HDF5 format (.h5).""" import h5py from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix class HDF5Dataset(DenseDesignMatrix): """Dense dataset loaded from an HDF5 file.""" def __init__(self, filename, X=None, topo_view=None, y=None, **kwargs): """ Loads data and labels from HDF5 file. Parameters ---------- filename: str HDF5 file name. X: str Key into HDF5 file for dataset design matrix. topo_view: str Key into HDF5 file for topological view of dataset. y: str Key into HDF5 file for dataset targets. """ with h5py.File(filename) as f: if X is not None: X = f[X][:] if topo_view is not None: topo_view = f[topo_view][:] if y is not None: y = f[y][:] super(HDF5Dataset, self).__init__(X, topo_view, y, **kwargs)
b824cadfe61de19b5ff0f7391fe2b21b034c71b4
readdata.py
readdata.py
import os,sys
import json
import csv
import soundfile as sf
from scipy.fftpack import dct
from features import mfcc,fbank,sigproc,logfbank


def parseJSON(directory, filename):
    data=[]
    jsonMeta=[]

    #open all files that end with .json in <path> directory
    #and store certain attributes
    try:
        json_data=open(os.path.join(directory, filename))
    except(IOError, RuntimeError ):
        print("Cannot open ", filename)

    data=json.load(json_data)

    jsonMeta.append(data['filesize'])
    jsonMeta.append(data['duration'])
    jsonMeta.append(data['samplerate'])
    jsonMeta.append(data['tags'])
    jsonMeta.append(data['type'])
    return jsonMeta


def parseCSV(directory, filename):
    with open(os.path.join(directory, filename)) as csvfile:
        csvMeta = csv.reader(csvfile, delimiter=",")
        return list(csvMeta)[0]


#returns a vector with (currently) 4 features
def extractFeatures(directory,filename):
    try:
        data,samplerate=sf.read(os.path.join(directory, filename))
    except (IOError, RuntimeError):
        print("Could not open file ", filename)
        print("Exiting...")
        sys.exit()

    #if file was opened succesfully proceed with feature extraction
    #win is the size of window for mfcc extraction AND step size
    win=data.size/(4*samplerate)
    featureVector=mfcc(data,samplerate,win,win,1)
    #featureVector is of type numpy_array
    return featureVector
import os,sys
import json
import csv
from yaafelib import *


def parseJSON(directory, filename):
    data=[]
    jsonMeta=[]

    #open all files that end with .json in <path> directory
    #and store certain attributes
    try:
        json_data=open(os.path.join(directory, filename))
    except(IOError, RuntimeError ):
        print("Cannot open ", filename)

    data=json.load(json_data)

    jsonMeta.append(data['filesize'])
    jsonMeta.append(data['duration'])
    jsonMeta.append(data['samplerate'])
    jsonMeta.append(data['tags'])
    jsonMeta.append(data['type'])
    return jsonMeta


def parseCSV(directory, filename):
    with open(os.path.join(directory, filename)) as csvfile:
        csvMeta = csv.reader(csvfile, delimiter=",")
        return list(csvMeta)[0]


#returns a vector with 2 features
def extractFeatures(directory,filename):
    # yaaaaafe
    fp = FeaturePlan(sample_rate=44100, resample=True)
    fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1')
    fp.addFeature('psp: PerceptualSpread blockSize=512 stepSize=256')
    df = fp.getDataFlow()
    engine = Engine()
    engine.load(df)
    afp = AudioFileProcessor()
    afp.processFile(engine,os.path.join(directory, filename))
    featureVector = engine.readAllOutputs()
    return featureVector
Use yaafe for feature extraction
Use yaafe for feature extraction

Right now we extract two features (mfcc and perceptual spread) but there is a lot of work to be done on the feature extraction method so this is probably going to change
Python
mit
lOStres/JaFEM
import os,sys import json import csv import soundfile as sf from scipy.fftpack import dct from features import mfcc,fbank,sigproc,logfbank def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with (currently) 4 features def extractFeatures(directory,filename): try: data,samplerate=sf.read(os.path.join(directory, filename)) except (IOError, RuntimeError): print("Could not open file ", filename) print("Exiting...") sys.exit() #if file was opened succesfully proceed with feature extraction #win is the size of window for mfcc extraction AND step size win=data.size/(4*samplerate) featureVector=mfcc(data,samplerate,win,win,1) #featureVector is of type numpy_array return featureVector Use yaafe for feature extraction Right now we extract two features (mfcc and perceptual spread) but there is a lot of work to be done on the feature extraction method so this is probably going to change
import os,sys import json import csv from yaafelib import * def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with 2 features def extractFeatures(directory,filename): # yaaaaafe fp = FeaturePlan(sample_rate=44100, resample=True) fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1') fp.addFeature('psp: PerceptualSpread blockSize=512 stepSize=256') df = fp.getDataFlow() engine = Engine() engine.load(df) afp = AudioFileProcessor() afp.processFile(engine,os.path.join(directory, filename)) featureVector = engine.readAllOutputs() return featureVector
<commit_before>import os,sys import json import csv import soundfile as sf from scipy.fftpack import dct from features import mfcc,fbank,sigproc,logfbank def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with (currently) 4 features def extractFeatures(directory,filename): try: data,samplerate=sf.read(os.path.join(directory, filename)) except (IOError, RuntimeError): print("Could not open file ", filename) print("Exiting...") sys.exit() #if file was opened succesfully proceed with feature extraction #win is the size of window for mfcc extraction AND step size win=data.size/(4*samplerate) featureVector=mfcc(data,samplerate,win,win,1) #featureVector is of type numpy_array return featureVector <commit_msg>Use yaafe for feature extraction Right now we extract two features (mfcc and perceptual spread) but there is a lot of work to be done on the feature extraction method so this is probably going to change<commit_after>
import os,sys import json import csv from yaafelib import * def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with 2 features def extractFeatures(directory,filename): # yaaaaafe fp = FeaturePlan(sample_rate=44100, resample=True) fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1') fp.addFeature('psp: PerceptualSpread blockSize=512 stepSize=256') df = fp.getDataFlow() engine = Engine() engine.load(df) afp = AudioFileProcessor() afp.processFile(engine,os.path.join(directory, filename)) featureVector = engine.readAllOutputs() return featureVector
import os,sys import json import csv import soundfile as sf from scipy.fftpack import dct from features import mfcc,fbank,sigproc,logfbank def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with (currently) 4 features def extractFeatures(directory,filename): try: data,samplerate=sf.read(os.path.join(directory, filename)) except (IOError, RuntimeError): print("Could not open file ", filename) print("Exiting...") sys.exit() #if file was opened succesfully proceed with feature extraction #win is the size of window for mfcc extraction AND step size win=data.size/(4*samplerate) featureVector=mfcc(data,samplerate,win,win,1) #featureVector is of type numpy_array return featureVector Use yaafe for feature extraction Right now we extract two features (mfcc and perceptual spread) but there is a lot of work to be done on the feature extraction method so this is probably going to changeimport os,sys import json import csv from yaafelib import * def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with 2 features def extractFeatures(directory,filename): # yaaaaafe fp = FeaturePlan(sample_rate=44100, resample=True) fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1') fp.addFeature('psp: PerceptualSpread blockSize=512 stepSize=256') df = fp.getDataFlow() engine = Engine() engine.load(df) afp = AudioFileProcessor() afp.processFile(engine,os.path.join(directory, filename)) featureVector = engine.readAllOutputs() return featureVector
<commit_before>import os,sys import json import csv import soundfile as sf from scipy.fftpack import dct from features import mfcc,fbank,sigproc,logfbank def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with (currently) 4 features def extractFeatures(directory,filename): try: data,samplerate=sf.read(os.path.join(directory, filename)) except (IOError, RuntimeError): print("Could not open file ", filename) print("Exiting...") sys.exit() #if file was opened succesfully proceed with feature extraction #win is the size of window for mfcc extraction AND step size win=data.size/(4*samplerate) featureVector=mfcc(data,samplerate,win,win,1) #featureVector is of type numpy_array return featureVector <commit_msg>Use yaafe for feature extraction Right now we extract two features (mfcc and perceptual spread) but there is a lot of work to be done on the feature extraction method so this is probably going to change<commit_after>import os,sys import json import csv from yaafelib import * def parseJSON(directory, filename): data=[] jsonMeta=[] #open all files that end with .json in <path> directory #and store certain attributes try: json_data=open(os.path.join(directory, filename)) except(IOError, RuntimeError ): print("Cannot open ", filename) data=json.load(json_data) jsonMeta.append(data['filesize']) jsonMeta.append(data['duration']) jsonMeta.append(data['samplerate']) jsonMeta.append(data['tags']) jsonMeta.append(data['type']) return jsonMeta def parseCSV(directory, filename): with open(os.path.join(directory, filename)) as csvfile: csvMeta = csv.reader(csvfile, delimiter=",") return list(csvMeta)[0] #returns a vector with 2 features def extractFeatures(directory,filename): # yaaaaafe fp = FeaturePlan(sample_rate=44100, resample=True) fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1') fp.addFeature('psp: PerceptualSpread blockSize=512 stepSize=256') df = fp.getDataFlow() engine = Engine() engine.load(df) afp = AudioFileProcessor() afp.processFile(engine,os.path.join(directory, filename)) featureVector = engine.readAllOutputs() return featureVector
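A note on the pattern in the commit above: the new code builds a yaafe feature plan, compiles it into an engine, and runs an audio file through it. Here is a minimal sketch of that pipeline reduced to one feature (the function name and file path are hypothetical; the yaafelib calls are the ones the commit itself uses):

from yaafelib import FeaturePlan, Engine, AudioFileProcessor

def extract_mfcc(path):
    # Declare the feature set; parameters mirror the commit above.
    fp = FeaturePlan(sample_rate=44100, resample=True)
    fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1')
    # Compile the plan and feed the audio file through the engine.
    engine = Engine()
    engine.load(fp.getDataFlow())
    afp = AudioFileProcessor()
    afp.processFile(engine, path)
    # readAllOutputs() returns a dict of arrays keyed by feature name.
    return engine.readAllOutputs()['mfcc']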
b0d9a11292b6d6b17fe8b72d7735d26c47599187
linkatos/printer.py
linkatos/printer.py
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
Change iteration over a collection based on ags suggestion
refactor: Change iteration over a collection based on ags suggestion
Python
mit
iwi/linkatos,iwi/linkatos
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client) refactor: Change iteration over a collection based on ags suggestion
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
<commit_before>def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client) <commit_msg>refactor: Change iteration over a collection based on ags suggestion<commit_after>
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client) refactor: Change iteration over a collection based on ags suggestiondef bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
<commit_before>def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" list_message = "The list of urls to be confirmed is: \n" for index in range(0, len(url_cache_list)): extra = "{} - {} \n".format(index, url_cache_list[index]['url']) list_message = list_message + extra return list_message def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client) <commit_msg>refactor: Change iteration over a collection based on ags suggestion<commit_after>def bot_says(channel, text, slack_client): return slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True) def compose_explanation(url): return "If you would like {} to be stored please react to it with a :+1:, \ if you would like it to be ignored use :-1:".format(url) def ask_confirmation(message, slack_client): bot_says(message['channel'], compose_explanation(message['url']), slack_client) def compose_url_list(url_cache_list): if len(url_cache_list) == 0: return "The list is empty" intro = "The list of urls to be confirmed is: \n" options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)] return intro + "\n".join(options) def list_cached_urls(url_cache_list, channel, slack_client): bot_says(channel, compose_url_list(url_cache_list), slack_client)
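The refactor in the commit above replaces an index-based accumulation loop with enumerate plus str.join. The same pattern in isolation, with a made-up list purely for illustration:

url_cache_list = [{'url': 'http://a.example'}, {'url': 'http://b.example'}]

# Old shape: manual indexing and repeated string concatenation.
message = ""
for index in range(0, len(url_cache_list)):
    message = message + "{} - {} \n".format(index, url_cache_list[index]['url'])

# New shape: enumerate pairs each item with its index, and join
# assembles the lines in one pass.
options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)]
message = "\n".join(options)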
547725be668e1e639ec0e6569e18a1e8bf03585c
tictactoe/settings_production.py
tictactoe/settings_production.py
from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False # TODO: temporarily disabled to test Heroku # ALLOWED_HOSTS = ['tictactoe.zupec.net'] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # }
from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False ALLOWED_HOSTS = [ 'tictactoe.zupec.net', 'tictactoe-zupec.herokuapp.com', ] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # }
Add Heroku DNS to allowed hosts
Add Heroku DNS to allowed hosts
Python
apache-2.0
NejcZupec/tictactoe,NejcZupec/tictactoe,NejcZupec/tictactoe
from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False # TODO: temporarily disabled to test Heroku # ALLOWED_HOSTS = ['tictactoe.zupec.net'] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # } Add Heroku DNS to allowed hosts
from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False ALLOWED_HOSTS = [ 'tictactoe.zupec.net', 'tictactoe-zupec.herokuapp.com', ] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # }
<commit_before>from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False # TODO: temporarily disabled to test Heroku # ALLOWED_HOSTS = ['tictactoe.zupec.net'] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # } <commit_msg>Add Heroku DNS to allowed hosts<commit_after>
from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False ALLOWED_HOSTS = [ 'tictactoe.zupec.net', 'tictactoe-zupec.herokuapp.com', ] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # }
from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False # TODO: temporarily disabled to test Heroku # ALLOWED_HOSTS = ['tictactoe.zupec.net'] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # } Add Heroku DNS to allowed hostsfrom django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False ALLOWED_HOSTS = [ 'tictactoe.zupec.net', 'tictactoe-zupec.herokuapp.com', ] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # }
<commit_before>from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False # TODO: temporarily disabled to test Heroku # ALLOWED_HOSTS = ['tictactoe.zupec.net'] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # } <commit_msg>Add Heroku DNS to allowed hosts<commit_after>from django.core.exceptions import ImproperlyConfigured from .settings import * def get_env_variable(var_name): """ Get the environment variable or return exception. """ try: return os.environ[var_name] except KeyError: error_msg = 'Set the %s environment variable' % var_name raise ImproperlyConfigured(error_msg) DEBUG = False ALLOWED_HOSTS = [ 'tictactoe.zupec.net', 'tictactoe-zupec.herokuapp.com', ] SECRET_KEY = get_env_variable('SECRET_KEY') MIDDLEWARE_CLASSES += ( # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ 'whitenoise.middleware.WhiteNoiseMiddleware', ) # TODO: temporarily disabled to test Heroku # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.postgresql', # 'NAME': get_env_variable("DATABASE_NAME"), # 'USER': get_env_variable("DATABASE_USER"), # 'PASSWORD': get_env_variable("DATABASE_PASSWORD"), # 'HOST': get_env_variable("DATABASE_HOST"), # 'PORT': '5432', # }, # }
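The commit above pins the host list in code right next to an env-var helper. A possible extension, sketched here as an assumption rather than what the project does (the ALLOWED_HOSTS environment variable name is hypothetical), drives the list from the environment with the same helper:

import os
from django.core.exceptions import ImproperlyConfigured

def get_env_variable(var_name):
    try:
        return os.environ[var_name]
    except KeyError:
        raise ImproperlyConfigured('Set the %s environment variable' % var_name)

# e.g. export ALLOWED_HOSTS="tictactoe.zupec.net,tictactoe-zupec.herokuapp.com"
ALLOWED_HOSTS = [h.strip() for h in get_env_variable('ALLOWED_HOSTS').split(',') if h.strip()]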
221f77cfba10cb52ab9fbb639cc948d7a61beb98
electionleaflets/settings/zappa.py
electionleaflets/settings/zappa.py
import os from .base import * # GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org']
import os from .base import * GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org']
Fix the geos library location
Fix the geos library location
Python
mit
DemocracyClub/electionleaflets,DemocracyClub/electionleaflets,DemocracyClub/electionleaflets
import os from .base import * # GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org'] Fix the geos library location
import os from .base import * GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org']
<commit_before>import os from .base import * # GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org'] <commit_msg>Fix the geos library location<commit_after>
import os from .base import * GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org']
import os from .base import * # GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org'] Fix the geos library locationimport os from .base import * GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org']
<commit_before>import os from .base import * # GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org'] <commit_msg>Fix the geos library location<commit_after>import os from .base import * GEOS_LIBRARY_PATH = '/var/task/libgeos_c.so' ALLOWED_HOSTS = ['*'] # Override the database name and user if needed DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'HOST': os.environ.get('DATABASE_HOST'), 'USER': os.environ.get('DATABASE_USER'), 'PORT': '5432', 'NAME': os.environ.get('DATABASE_NAME'), 'PASSWORD': os.environ.get('DATABASE_PASS') } } DEFAULT_FILE_STORAGE = 's3_lambda_storage.S3LambdaStorage' STATICFILES_STORAGE = 's3_lambda_storage.S3StaticLambdaStorage' AWS_STORAGE_BUCKET_NAME = "data.electionleaflets.org" AWS_S3_SECURE_URLS = True AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = "data.electionleaflets.org" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'el_cache', } } THUMBNAIL_KVSTORE ='sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' CSRF_TRUSTED_ORIGINS = ['.electionleaflets.org']
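The fix above re-enables a GEOS path that only exists inside the Lambda bundle, so the same settings file would not import cleanly outside it. One defensive variant, offered only as a sketch and not as what the project does, sets the path only when the bundled library is actually present:

import os

LAMBDA_GEOS = '/var/task/libgeos_c.so'
if os.path.exists(LAMBDA_GEOS):
    # Inside the Zappa/Lambda package, point GeoDjango at the bundled .so;
    # elsewhere, let GEOS be located on the normal system paths.
    GEOS_LIBRARY_PATH = LAMBDA_GEOS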
32e2d6e866cee45d4955aa020d9b9bd3c0a2b700
pudb/__init__.py
pudb/__init__.py
VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback)
VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) if __name__ == "__main__": print "To keep Python 2.6 happy, you now need to type 'python -m pudb.run'." print "Sorry for the inconvenience."
Print a warning about the move to '-m pudb.run'.
Print a warning about the move to '-m pudb.run'.
Python
mit
amigrave/pudb,albfan/pudb,amigrave/pudb,albfan/pudb
VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) Print a warning about the move to '-m pudb.run'.
VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) if __name__ == "__main__": print "To keep Python 2.6 happy, you now need to type 'python -m pudb.run'." print "Sorry for the inconvenience."
<commit_before>VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) <commit_msg>Print a warning about the move to '-m pudb.run'.<commit_after>
VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) if __name__ == "__main__": print "To keep Python 2.6 happy, you now need to type 'python -m pudb.run'." print "Sorry for the inconvenience."
VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) Print a warning about the move to '-m pudb.run'.VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) if __name__ == "__main__": print "To keep Python 2.6 happy, you now need to type 'python -m pudb.run'." print "Sorry for the inconvenience."
<commit_before>VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) <commit_msg>Print a warning about the move to '-m pudb.run'.<commit_after>VERSION = "0.91.2" CURRENT_DEBUGGER = [None] def set_trace(): if CURRENT_DEBUGGER[0] is None: from pudb.debugger import Debugger dbg = Debugger() CURRENT_DEBUGGER[0] = dbg import sys dbg.set_trace(sys._getframe().f_back) def post_mortem(t): p = Debugger() p.reset() while t.tb_next is not None: t = t.tb_next p.interaction(t.tb_frame, t) def pm(): import sys post_mortem(sys.last_traceback) if __name__ == "__main__": print "To keep Python 2.6 happy, you now need to type 'python -m pudb.run'." print "Sorry for the inconvenience."
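The warning above uses Python 2 print statements, matching the module's vintage. A version-agnostic spelling of the same idea, purely illustrative, writes to stderr instead:

import sys

if __name__ == "__main__":
    # Works unchanged on Python 2 and 3.
    sys.stderr.write(
        "To keep Python 2.6 happy, you now need to type 'python -m pudb.run'.\n"
        "Sorry for the inconvenience.\n")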
bace1847cb9479bfeb271f38eef502f8d3ac240a
qipr/registry/forms/facet_form.py
qipr/registry/forms/facet_form.py
from registry.models import * from operator import attrgetter facet_Models = [ BigAim, Category, ClinicalArea, ClinicalSetting, Keyword, SafetyTarget, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models)
from registry.models import * from operator import attrgetter facet_Models = [ BigAim, ClinicalArea, ClinicalSetting, Keyword, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models)
Remove facets from main registry project page
Remove facets from main registry project page
Python
apache-2.0
ctsit/qipr,ctsit/qipr,ctsit/qipr,ctsit/qipr,ctsit/qipr
from registry.models import * from operator import attrgetter facet_Models = [ BigAim, Category, ClinicalArea, ClinicalSetting, Keyword, SafetyTarget, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models) Remove facets from main registry project page
from registry.models import * from operator import attrgetter facet_Models = [ BigAim, ClinicalArea, ClinicalSetting, Keyword, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models)
<commit_before>from registry.models import * from operator import attrgetter facet_Models = [ BigAim, Category, ClinicalArea, ClinicalSetting, Keyword, SafetyTarget, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models) <commit_msg>Remove facets from main registry project page<commit_after>
from registry.models import * from operator import attrgetter facet_Models = [ BigAim, ClinicalArea, ClinicalSetting, Keyword, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models)
from registry.models import * from operator import attrgetter facet_Models = [ BigAim, Category, ClinicalArea, ClinicalSetting, Keyword, SafetyTarget, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models) Remove facets from main registry project pagefrom registry.models import * from operator import attrgetter facet_Models = [ BigAim, ClinicalArea, ClinicalSetting, Keyword, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models)
<commit_before>from registry.models import * from operator import attrgetter facet_Models = [ BigAim, Category, ClinicalArea, ClinicalSetting, Keyword, SafetyTarget, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models) <commit_msg>Remove facets from main registry project page<commit_after>from registry.models import * from operator import attrgetter facet_Models = [ BigAim, ClinicalArea, ClinicalSetting, Keyword, ] class FacetForm: def __init__(self): self.facet_categories = [model.__name__ for model in facet_Models] for model in facet_Models: models = list(model.objects.all()) models.sort(key=lambda m : m.project_set.count(), reverse=True) setattr(self, model.__name__, models)
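The form above sorts each facet's rows in Python, calling project_set.count() once per instance. A common alternative, shown only as a sketch (it assumes the default Django reverse relation name 'project'), pushes the counting and ordering into a single annotated query:

from django.db.models import Count

def ordered_facets(model):
    # One SQL query: every row arrives with a num_projects column,
    # already sorted by usage, instead of one COUNT query per row.
    return model.objects.annotate(num_projects=Count('project')).order_by('-num_projects')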
4792515739c4ee671b86aeed39022ad8934d5d7c
artie/applications.py
artie/applications.py
import os import re import sys import settings triggers = set() timers = set() class BadApplicationError(Exception): pass def trigger(expression): def decorator(func): triggers.add((re.compile(expression), func)) return decorator def timer(time): def decorator(func): timers.add((time, func)) return decorator sys.path.insert(0, settings.APPLICATION_PATH) for filename in os.listdir(settings.APPLICATION_PATH): if filename != '__init__.py' and filename[-3:] == '.py': if filename == 'triggers.py': raise BadApplicationException( "Application file can't be called triggers.py" ) module = filename[:-3] if module in sys.modules: reload(sys.modules[module]) else: __import__(module, locals(), globals())
import os import re import sys import settings triggers = set() timers = set() class BadApplicationError(Exception): pass def trigger(expression): def decorator(func): triggers.add((re.compile(expression), func)) return decorator def timer(time): def decorator(func): timers.add((time, func)) return decorator sys.path.insert(0, settings.APPLICATION_PATH) for filename in os.listdir(settings.APPLICATION_PATH): if filename != '__init__.py' and filename.endswith('.py'): if filename == 'triggers.py': raise BadApplicationException( "Application file can't be called triggers.py" ) module = filename[:-3] if module in sys.modules: reload(sys.modules[module]) else: __import__(module, locals(), globals())
Use `endswith` instead of string indices.
Use `endswith` instead of string indices.
Python
mit
sumeet/artie
import os
import re
import sys

import settings

triggers = set()
timers = set()

class BadApplicationError(Exception):
    pass

def trigger(expression):
    def decorator(func):
        triggers.add((re.compile(expression), func))
    return decorator

def timer(time):
    def decorator(func):
        timers.add((time, func))
    return decorator

sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
    if filename != '__init__.py' and filename[-3:] == '.py':
        if filename == 'triggers.py':
            raise BadApplicationException(
                "Application file can't be called triggers.py"
            )
        module = filename[:-3]
        if module in sys.modules:
            reload(sys.modules[module])
        else:
            __import__(module, locals(), globals())

Use `endswith` instead of string indices.
import os import re import sys import settings triggers = set() timers = set() class BadApplicationError(Exception): pass def trigger(expression): def decorator(func): triggers.add((re.compile(expression), func)) return decorator def timer(time): def decorator(func): timers.add((time, func)) return decorator sys.path.insert(0, settings.APPLICATION_PATH) for filename in os.listdir(settings.APPLICATION_PATH): if filename != '__init__.py' and filename.endswith('.py'): if filename == 'triggers.py': raise BadApplicationException( "Application file can't be called triggers.py" ) module = filename[:-3] if module in sys.modules: reload(sys.modules[module]) else: __import__(module, locals(), globals())
<commit_before>import os
import re
import sys

import settings

triggers = set()
timers = set()

class BadApplicationError(Exception):
    pass

def trigger(expression):
    def decorator(func):
        triggers.add((re.compile(expression), func))
    return decorator

def timer(time):
    def decorator(func):
        timers.add((time, func))
    return decorator

sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
    if filename != '__init__.py' and filename[-3:] == '.py':
        if filename == 'triggers.py':
            raise BadApplicationException(
                "Application file can't be called triggers.py"
            )
        module = filename[:-3]
        if module in sys.modules:
            reload(sys.modules[module])
        else:
            __import__(module, locals(), globals())
<commit_msg>Use `endswith` instead of string indices.<commit_after>
import os import re import sys import settings triggers = set() timers = set() class BadApplicationError(Exception): pass def trigger(expression): def decorator(func): triggers.add((re.compile(expression), func)) return decorator def timer(time): def decorator(func): timers.add((time, func)) return decorator sys.path.insert(0, settings.APPLICATION_PATH) for filename in os.listdir(settings.APPLICATION_PATH): if filename != '__init__.py' and filename.endswith('.py'): if filename == 'triggers.py': raise BadApplicationException( "Application file can't be called triggers.py" ) module = filename[:-3] if module in sys.modules: reload(sys.modules[module]) else: __import__(module, locals(), globals())
import os
import re
import sys

import settings

triggers = set()
timers = set()

class BadApplicationError(Exception):
    pass

def trigger(expression):
    def decorator(func):
        triggers.add((re.compile(expression), func))
    return decorator

def timer(time):
    def decorator(func):
        timers.add((time, func))
    return decorator

sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
    if filename != '__init__.py' and filename[-3:] == '.py':
        if filename == 'triggers.py':
            raise BadApplicationException(
                "Application file can't be called triggers.py"
            )
        module = filename[:-3]
        if module in sys.modules:
            reload(sys.modules[module])
        else:
            __import__(module, locals(), globals())
Use `endswith` instead of string indices.import os
import re
import sys

import settings

triggers = set()
timers = set()

class BadApplicationError(Exception):
    pass

def trigger(expression):
    def decorator(func):
        triggers.add((re.compile(expression), func))
    return decorator

def timer(time):
    def decorator(func):
        timers.add((time, func))
    return decorator

sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
    if filename != '__init__.py' and filename.endswith('.py'):
        if filename == 'triggers.py':
            raise BadApplicationException(
                "Application file can't be called triggers.py"
            )
        module = filename[:-3]
        if module in sys.modules:
            reload(sys.modules[module])
        else:
            __import__(module, locals(), globals())
<commit_before>import os
import re
import sys

import settings

triggers = set()
timers = set()

class BadApplicationError(Exception):
    pass

def trigger(expression):
    def decorator(func):
        triggers.add((re.compile(expression), func))
    return decorator

def timer(time):
    def decorator(func):
        timers.add((time, func))
    return decorator

sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
    if filename != '__init__.py' and filename[-3:] == '.py':
        if filename == 'triggers.py':
            raise BadApplicationException(
                "Application file can't be called triggers.py"
            )
        module = filename[:-3]
        if module in sys.modules:
            reload(sys.modules[module])
        else:
            __import__(module, locals(), globals())
<commit_msg>Use `endswith` instead of string indices.<commit_after>import os
import re
import sys

import settings

triggers = set()
timers = set()

class BadApplicationError(Exception):
    pass

def trigger(expression):
    def decorator(func):
        triggers.add((re.compile(expression), func))
    return decorator

def timer(time):
    def decorator(func):
        timers.add((time, func))
    return decorator

sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
    if filename != '__init__.py' and filename.endswith('.py'):
        if filename == 'triggers.py':
            raise BadApplicationException(
                "Application file can't be called triggers.py"
            )
        module = filename[:-3]
        if module in sys.modules:
            reload(sys.modules[module])
        else:
            __import__(module, locals(), globals())
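The change above swaps a slice comparison for str.endswith. Two equivalent slice-free spellings of the suffix check, as a standalone sketch with a hypothetical filename:

import os

filename = 'plugin.py'  # hypothetical

# endswith also accepts a tuple when several suffixes are acceptable:
is_python = filename.endswith(('.py', '.pyw'))

# os.path.splitext makes the extension explicit instead of slicing:
root, ext = os.path.splitext(filename)
is_python = ext in ('.py', '.pyw')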
46e1672bb0226ae8157d63a2d73edbfefcd644e9
src/test/test_google_maps.py
src/test/test_google_maps.py
import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): # TODO: Sane way to import key with open('/Users/nhc/git/pyrules/google-maps-api-key.txt') as f: self.key = f.read() def test_roundtrip(self): c = googlemaps.Client(key=self.key) r = driving_roundtrip(c, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, best_itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], best_itinerary) if __name__ == "__main__": unittest.main()
from os import environ import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): try: key = environ['GOOGLE_MAPS_API_KEY'] except KeyError: self.fail('This test requires an API key for Google Maps in the environment variable GOOGLE_MAPS_API_KEY') self.client = googlemaps.Client(key=key) def test_roundtrip(self): r = driving_roundtrip(self.client, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], itinerary) if __name__ == "__main__": unittest.main()
Move API key to environment variable
Move API key to environment variable
Python
mit
mr-niels-christensen/pyrules
import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): # TODO: Sane way to import key with open('/Users/nhc/git/pyrules/google-maps-api-key.txt') as f: self.key = f.read() def test_roundtrip(self): c = googlemaps.Client(key=self.key) r = driving_roundtrip(c, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, best_itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], best_itinerary) if __name__ == "__main__": unittest.main() Move API key to environment variable
from os import environ import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): try: key = environ['GOOGLE_MAPS_API_KEY'] except KeyError: self.fail('This test requires an API key for Google Maps in the environment variable GOOGLE_MAPS_API_KEY') self.client = googlemaps.Client(key=key) def test_roundtrip(self): r = driving_roundtrip(self.client, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], itinerary) if __name__ == "__main__": unittest.main()
<commit_before>import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): # TODO: Sane way to import key with open('/Users/nhc/git/pyrules/google-maps-api-key.txt') as f: self.key = f.read() def test_roundtrip(self): c = googlemaps.Client(key=self.key) r = driving_roundtrip(c, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, best_itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], best_itinerary) if __name__ == "__main__": unittest.main() <commit_msg>Move API key to environment variable<commit_after>
from os import environ import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): try: key = environ['GOOGLE_MAPS_API_KEY'] except KeyError: self.fail('This test requires an API key for Google Maps in the environment variable GOOGLE_MAPS_API_KEY') self.client = googlemaps.Client(key=key) def test_roundtrip(self): r = driving_roundtrip(self.client, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], itinerary) if __name__ == "__main__": unittest.main()
import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): # TODO: Sane way to import key with open('/Users/nhc/git/pyrules/google-maps-api-key.txt') as f: self.key = f.read() def test_roundtrip(self): c = googlemaps.Client(key=self.key) r = driving_roundtrip(c, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, best_itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], best_itinerary) if __name__ == "__main__": unittest.main() Move API key to environment variablefrom os import environ import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): try: key = environ['GOOGLE_MAPS_API_KEY'] except KeyError: self.fail('This test requires an API key for Google Maps in the environment variable GOOGLE_MAPS_API_KEY') self.client = googlemaps.Client(key=key) def test_roundtrip(self): r = driving_roundtrip(self.client, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], itinerary) if __name__ == "__main__": unittest.main()
<commit_before>import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): # TODO: Sane way to import key with open('/Users/nhc/git/pyrules/google-maps-api-key.txt') as f: self.key = f.read() def test_roundtrip(self): c = googlemaps.Client(key=self.key) r = driving_roundtrip(c, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, best_itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], best_itinerary) if __name__ == "__main__": unittest.main() <commit_msg>Move API key to environment variable<commit_after>from os import environ import unittest import googlemaps from pyrules2.googlemaps import driving_roundtrip COP = 'Copenhagen, Denmark' MAD = 'Madrid, Spain' BER = 'Berlin, Germany' LIS = 'Lisbon, Portugal' KM = 1000 class Test(unittest.TestCase): def setUp(self): try: key = environ['GOOGLE_MAPS_API_KEY'] except KeyError: self.fail('This test requires an API key for Google Maps in the environment variable GOOGLE_MAPS_API_KEY') self.client = googlemaps.Client(key=key) def test_roundtrip(self): r = driving_roundtrip(self.client, COP, MAD, BER, LIS) self.assertGreater(r.distance(), 10000 * KM) # Bad min_dist, itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives())) self.assertLess(min_dist, 6500 * KM) # Good self.assertListEqual([COP, LIS, MAD, BER, COP], itinerary) if __name__ == "__main__": unittest.main()
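The setUp above fails the test outright when GOOGLE_MAPS_API_KEY is unset. A related pattern, sketched as an alternative rather than the project's choice, skips the case instead so the suite stays green on machines without credentials:

import unittest
from os import environ

@unittest.skipUnless('GOOGLE_MAPS_API_KEY' in environ,
                     'requires GOOGLE_MAPS_API_KEY in the environment')
class RoundtripTest(unittest.TestCase):
    def setUp(self):
        # Safe to index: the decorator guarantees the variable exists.
        self.key = environ['GOOGLE_MAPS_API_KEY']

    def test_key_present(self):
        self.assertTrue(self.key)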
1bc3bd857e6b62d9cd63c6b2edfd7003dd8110b4
modules/apis/sr_com.py
modules/apis/sr_com.py
#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response
#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init__(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response
Fix forgotten __ for init
Fix forgotten __ for init
Python
mit
BatedUrGonnaDie/salty_bot
#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response Fix forgotten __ for init
#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init__(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response
<commit_before>#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response <commit_msg>Fix forgotten __ for init<commit_after>
#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init__(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response
#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response Fix forgotten __ for init#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init__(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response
<commit_before>#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response <commit_msg>Fix forgotten __ for init<commit_after>#! /usr/bin/env python2.7 import modules.apis.api_base as api class SRcomAPI(api.API): def __init__(self, session = None): super(SRcomAPI, self).__init__("http://www.speedrun.com/api/v1", session) def get_user_pbs(self, user, embeds = "", **kwargs): # Embeds should be list of parameters endpoint = "/users/{0}/personal-bests?embed={1}".format(user, ",".join(embeds)) success, response = self.get(endpoint, **kwargs) return success, response
51d1701bbc8b25bfd7b4f70c83fec7a46d965bef
fireplace/cards/brawl/decks_assemble.py
fireplace/cards/brawl/decks_assemble.py
""" Decks Assemble """ from ..utils import * # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1)
""" Decks Assemble """ from ..utils import * # Deckbuilding Enchant class TB_010: events = ( OWN_TURN_BEGIN.on(Discover(CONTROLLER, RandomCollectible())), Play(CONTROLLER).on(Shuffle(CONTROLLER, Copy(Play.CARD))), OWN_TURN_END.on(Shuffle(CONTROLLER, FRIENDLY_HAND)) ) # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1)
Implement Decks Assemble brawl rules on the Deckbuilding Enchant
Implement Decks Assemble brawl rules on the Deckbuilding Enchant
Python
agpl-3.0
Ragowit/fireplace,NightKev/fireplace,beheh/fireplace,smallnamespace/fireplace,Ragowit/fireplace,jleclanche/fireplace,smallnamespace/fireplace
""" Decks Assemble """ from ..utils import * # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1) Implement Decks Assemble brawl rules on the Deckbuilding Enchant
""" Decks Assemble """ from ..utils import * # Deckbuilding Enchant class TB_010: events = ( OWN_TURN_BEGIN.on(Discover(CONTROLLER, RandomCollectible())), Play(CONTROLLER).on(Shuffle(CONTROLLER, Copy(Play.CARD))), OWN_TURN_END.on(Shuffle(CONTROLLER, FRIENDLY_HAND)) ) # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1)
<commit_before>""" Decks Assemble """ from ..utils import * # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1) <commit_msg>Implement Decks Assemble brawl rules on the Deckbuilding Enchant<commit_after>
""" Decks Assemble """ from ..utils import * # Deckbuilding Enchant class TB_010: events = ( OWN_TURN_BEGIN.on(Discover(CONTROLLER, RandomCollectible())), Play(CONTROLLER).on(Shuffle(CONTROLLER, Copy(Play.CARD))), OWN_TURN_END.on(Shuffle(CONTROLLER, FRIENDLY_HAND)) ) # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1)
""" Decks Assemble """ from ..utils import * # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1) Implement Decks Assemble brawl rules on the Deckbuilding Enchant""" Decks Assemble """ from ..utils import * # Deckbuilding Enchant class TB_010: events = ( OWN_TURN_BEGIN.on(Discover(CONTROLLER, RandomCollectible())), Play(CONTROLLER).on(Shuffle(CONTROLLER, Copy(Play.CARD))), OWN_TURN_END.on(Shuffle(CONTROLLER, FRIENDLY_HAND)) ) # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1)
<commit_before>""" Decks Assemble """ from ..utils import * # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1) <commit_msg>Implement Decks Assemble brawl rules on the Deckbuilding Enchant<commit_after>""" Decks Assemble """ from ..utils import * # Deckbuilding Enchant class TB_010: events = ( OWN_TURN_BEGIN.on(Discover(CONTROLLER, RandomCollectible())), Play(CONTROLLER).on(Shuffle(CONTROLLER, Copy(Play.CARD))), OWN_TURN_END.on(Shuffle(CONTROLLER, FRIENDLY_HAND)) ) # Tarnished Coin class TB_011: play = ManaThisTurn(CONTROLLER, 1)
3f4415bd551b52418a5999a1ea64e31d15097802
framework/transactions/commands.py
framework/transactions/commands.py
# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database database.command('rollbackTransaction') def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions')
# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database from pymongo.errors import OperationFailure def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database try: database.command('rollbackTransaction') except OperationFailure: pass def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions')
Fix rollback transaction issue by adding except for OperationFailure to rollback
Fix rollback transaction issue by adding except for OperationFailure to rollback
Python
apache-2.0
erinspace/osf.io,doublebits/osf.io,brandonPurvis/osf.io,brandonPurvis/osf.io,kwierman/osf.io,GageGaskins/osf.io,hmoco/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,felliott/osf.io,cosenal/osf.io,abought/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,himanshuo/osf.io,sbt9uc/osf.io,bdyetton/prettychart,samchrisinger/osf.io,monikagrabowska/osf.io,mluo613/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,GaZ3ll3/osf.io,icereval/osf.io,cwisecarver/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,barbour-em/osf.io,caneruguz/osf.io,jnayak1/osf.io,adlius/osf.io,pattisdr/osf.io,zachjanicki/osf.io,samanehsan/osf.io,mluke93/osf.io,lyndsysimon/osf.io,mfraezz/osf.io,binoculars/osf.io,asanfilippo7/osf.io,laurenrevere/osf.io,ticklemepierce/osf.io,chennan47/osf.io,erinspace/osf.io,doublebits/osf.io,cslzchen/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,kushG/osf.io,acshi/osf.io,sloria/osf.io,emetsger/osf.io,wearpants/osf.io,baylee-d/osf.io,rdhyee/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,alexschiller/osf.io,mattclark/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,caseyrollins/osf.io,hmoco/osf.io,amyshi188/osf.io,TomBaxter/osf.io,chrisseto/osf.io,KAsante95/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,felliott/osf.io,GageGaskins/osf.io,abought/osf.io,fabianvf/osf.io,reinaH/osf.io,erinspace/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,revanthkolli/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,njantrania/osf.io,RomanZWang/osf.io,alexschiller/osf.io,lamdnhan/osf.io,monikagrabowska/osf.io,mluke93/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,cosenal/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,Nesiehr/osf.io,doublebits/osf.io,GaryKriebel/osf.io,cldershem/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,binoculars/osf.io,lyndsysimon/osf.io,jnayak1/osf.io,wearpants/osf.io,cosenal/osf.io,fabianvf/osf.io,zkraime/osf.io,aaxelb/osf.io,mattclark/osf.io,jinluyuan/osf.io,rdhyee/osf.io,kushG/osf.io,jmcarp/osf.io,emetsger/osf.io,mluke93/osf.io,kushG/osf.io,ckc6cz/osf.io,jeffreyliu3230/osf.io,doublebits/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,acshi/osf.io,billyhunt/osf.io,caseyrygt/osf.io,ticklemepierce/osf.io,reinaH/osf.io,Johnetordoff/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,emetsger/osf.io,amyshi188/osf.io,KAsante95/osf.io,ZobairAlijan/osf.io,jinluyuan/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,cldershem/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,sloria/osf.io,GaryKriebel/osf.io,baylee-d/osf.io,kch8qx/osf.io,laurenrevere/osf.io,jinluyuan/osf.io,lyndsysimon/osf.io,amyshi188/osf.io,mluke93/osf.io,Ghalko/osf.io,himanshuo/osf.io,GageGaskins/osf.io,kwierman/osf.io,samanehsan/osf.io,jnayak1/osf.io,arpitar/osf.io,jmcarp/osf.io,zkraime/osf.io,zamattiac/osf.io,crcresearch/osf.io,himanshuo/osf.io,felliott/osf.io,jmcarp/osf.io,HarryRybacki/osf.io,mluo613/osf.io,HarryRybacki/osf.io,acshi/osf.io,bdyetton/prettychart,danielneis/osf.io,cldershem/osf.io,jolene-esposito/osf.io,jinluyuan/osf.io,kch8qx/osf.io,danielneis/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,samanehsan/osf.io,cwisecarver/osf.io,billyhunt/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,brianjgeiger/osf.io,zkraime/osf.io,TomBaxter/osf.io,chrisseto/osf.io,chennan47/osf.io,mfraezz/osf.io,felliott/osf.io,doublebits/osf.io,brandonPurvis/osf.io,baylee-d/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,cslzchen/osf.io,kwierman/osf.io,DanielSBrown/osf.io,arpitar/osf.io,barbour-em/osf.io,chrisseto/osf.io,caseyrollins/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,lamdnhan/osf.io,dplorimer/osf,dplorimer/osf,dplorimer/osf
# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database database.command('rollbackTransaction') def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions') Fix rollback transaction issue by adding except for OperationFailure to rollback
# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database from pymongo.errors import OperationFailure def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database try: database.command('rollbackTransaction') except OperationFailure: pass def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions')
<commit_before># -*- coding: utf-8 -*- from framework.mongo import database as proxy_database def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database database.command('rollbackTransaction') def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions') <commit_msg>Fix rollback transaction issue by adding except for OperationFailure to rollback<commit_after>
# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database from pymongo.errors import OperationFailure def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database try: database.command('rollbackTransaction') except OperationFailure: pass def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions')
# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database database.command('rollbackTransaction') def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions') Fix rollback transaction issue by adding except for OperationFailure to rollback# -*- coding: utf-8 -*- from framework.mongo import database as proxy_database from pymongo.errors import OperationFailure def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database try: database.command('rollbackTransaction') except OperationFailure: pass def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions')
<commit_before># -*- coding: utf-8 -*- from framework.mongo import database as proxy_database def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database database.command('rollbackTransaction') def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions') <commit_msg>Fix rollback transaction issue by adding except for OperationFailure to rollback<commit_after># -*- coding: utf-8 -*- from framework.mongo import database as proxy_database from pymongo.errors import OperationFailure def begin(database=None): database = database or proxy_database database.command('beginTransaction') def rollback(database=None): database = database or proxy_database try: database.command('rollbackTransaction') except OperationFailure: pass def commit(database=None): database = database or proxy_database database.command('commitTransaction') def show_live(database=None): database = database or proxy_database return database.command('showLiveTransactions')
7076e7eb0fcce37159aa58c2c0699674434115d9
relaygram/http_server.py
relaygram/http_server.py
import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler
import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler
Use commonprefix to break dep on 3.5
Use commonprefix to break dep on 3.5
Python
mit
Surye/relaygram
import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler Use commonprefix to break dep on 3.5
import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler
<commit_before>import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler <commit_msg>Use commonprefix to break dep on 3.5<commit_after>
import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler
import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler Use commonprefix to break dep on 3.5import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler
<commit_before>import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler <commit_msg>Use commonprefix to break dep on 3.5<commit_after>import http.server from threading import Thread import os.path class HTTPHandler: def __init__(self, config): self.config = config handler = HTTPHandler.make_http_handler(self.config['media_dir']) self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler) self.thread = Thread(target=self.main_loop) def run(self): self.thread.start() return self def main_loop(self): self.httpd.serve_forever() @staticmethod def make_http_handler(root_path): class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler): def __init__(self, *args, **kwargs): super(RelayGramHTTPHandler, self).__init__(*args, **kwargs) def do_GET(self): file_path = os.path.abspath(root_path + self.path) if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt self.send_error(501, "Nice try") else: if not os.path.exists(file_path) or not os.path.isfile(file_path): self.send_error(404, 'File Not Found') else: self.send_response(200) self.wfile.write(open(file_path, mode='rb').read()) return RelayGramHTTPHandler
43dca4ad969e44bb753c152e8f7768febea6fb68
quantecon/__init__.py
quantecon/__init__.py
""" Import the main names to top level. """ from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__
""" Import the main names to top level. """ try: import numba except: raise ImportError("Cannot import numba from current anaconda distribution. Please run `conda install numba` to install the latest version.") from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__
Add check for numba in base anaconda distribution. If not found, issue meaningful warning message
Add check for numba in base anaconda distribution. If not found, issue meaningful warning message
Python
bsd-3-clause
oyamad/QuantEcon.py,QuantEcon/QuantEcon.py,QuantEcon/QuantEcon.py,oyamad/QuantEcon.py
""" Import the main names to top level. """ from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__ Add Check for numba in base anaconda distribution. If not found issue meaningful warning message
""" Import the main names to top level. """ try: import numba except: raise ImportError("Cannot import numba from current anaconda distribution. Please run `conda install numba` to install the latest version.") from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__
<commit_before>""" Import the main names to top level. """ from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__ <commit_msg>Add Check for numba in base anaconda distribution. If not found issue meaningful warning message<commit_after>
""" Import the main names to top level. """ try: import numba except: raise ImportError("Cannot import numba from current anaconda distribution. Please run `conda install numba` to install the latest version.") from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__
""" Import the main names to top level. """ from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__ Add Check for numba in base anaconda distribution. If not found issue meaningful warning message""" Import the main names to top level. """ try: import numba except: raise ImportError("Cannot import numba from current anaconda distribution. Please run `conda install numba` to install the latest version.") from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__
<commit_before>""" Import the main names to top level. """ from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__ <commit_msg>Add Check for numba in base anaconda distribution. If not found issue meaningful warning message<commit_after>""" Import the main names to top level. """ try: import numba except: raise ImportError("Cannot import numba from current anaconda distribution. Please run `conda install numba` to install the latest version.") from .compute_fp import compute_fixed_point from .discrete_rv import DiscreteRV from .ecdf import ECDF from .estspec import smooth, periodogram, ar_periodogram from .graph_tools import DiGraph from .gridtools import cartesian, mlinspace from .kalman import Kalman from .lae import LAE from .arma import ARMA from .lqcontrol import LQ from .lqnash import nnash from .lss import LinearStateSpace from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati from .quadsums import var_quadratic_sum, m_quadratic_sum #->Propose Delete From Top Level from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package #<- from .rank_nullspace import rank_est, nullspace from .robustlq import RBLQ from . import quad as quad from .util import searchsorted #Add Version Attribute from .version import version as __version__
191812e1e16aea352c1a47cd5bd6cd3f0ed67802
runtests.py
runtests.py
#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ) ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ), MIDDLEWARE_CLASSES = (), ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
Remove warning for Django 1.7
Remove warning for Django 1.7
Python
bsd-3-clause
edoburu/django-template-analyzer,edoburu/django-template-analyzer
#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ) ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests() Remove warning for Django 1.7
#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ), MIDDLEWARE_CLASSES = (), ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
<commit_before>#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ) ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests() <commit_msg>Remove warning for Django 1.7<commit_after>
#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ), MIDDLEWARE_CLASSES = (), ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ) ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests() Remove warning for Django 1.7#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ), MIDDLEWARE_CLASSES = (), ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
<commit_before>#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ) ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests() <commit_msg>Remove warning for Django 1.7<commit_after>#!/usr/bin/env python import sys from django.conf import settings from django.core.management import execute_from_command_line if not settings.configured: settings.configure( DEBUG = True, TEMPLATE_DEBUG = True, DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:' } }, TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ), INSTALLED_APPS = ( 'template_analyzer', ), MIDDLEWARE_CLASSES = (), ) def runtests(): argv = sys.argv[:1] + ['test', 'template_analyzer', '--traceback'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
37fcbbe7bb8a46bd32fc92341d1b42f2400abc9b
runtests.py
runtests.py
#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite)
#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) #suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite)
Disable flatfile tests until atom tests complete.
Disable flatfile tests until atom tests complete.
Python
bsd-3-clause
rescrv/firmant
#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite) Disable flatfile tests until atom tests complete.
#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) #suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite)
<commit_before>#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite) <commit_msg>Disable flatfile tests until atom tests complete.<commit_after>
#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) #suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite)
#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite) Disable flatfile tests until atom tests complete.#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) #suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite)
<commit_before>#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite) <commit_msg>Disable flatfile tests until atom tests complete.<commit_after>#!/usr/bin/python import unittest from firmant.utils import get_module # Import this now to avoid it throwing errors. import pytz from firmant.configuration import settings from test.configuration import suite as configuration_tests from test.datasource.atom import suite as atom_tests from test.plugins.datasource.flatfile.atom import suite as flatfile_atom_tests from test.resolvers import suite as resolvers_tests if __name__ == '__main__': settings.reconfigure('test_settings') for plugin in settings['PLUGINS']: try: mod = get_module(plugin) except ImportError: raise suite = unittest.TestSuite() suite.addTests(configuration_tests) suite.addTests(atom_tests) #suite.addTests(flatfile_atom_tests) suite.addTests(resolvers_tests) unittest.TextTestRunner(verbosity=2).run(suite)
2a5b81ff6272f346bcca3bdc97e3d6d9dfe2b017
src/mmw/apps/bigcz/models.py
src/mmw/apps/bigcz/models.py
# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.set_srid(4326) return polygon.transform(5070, clone=True).area
# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.srid = 4326 return polygon.transform(5070, clone=True).area
Set SRID the new way
Set SRID the new way
Python
apache-2.0
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.set_srid(4326) return polygon.transform(5070, clone=True).area Set SRID the new way
# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.srid = 4326 return polygon.transform(5070, clone=True).area
<commit_before># -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.set_srid(4326) return polygon.transform(5070, clone=True).area <commit_msg>Set SRID the new way<commit_after>
# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.srid = 4326 return polygon.transform(5070, clone=True).area
# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.set_srid(4326) return polygon.transform(5070, clone=True).area Set SRID the new way# -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.srid = 4326 return polygon.transform(5070, clone=True).area
<commit_before># -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.set_srid(4326) return polygon.transform(5070, clone=True).area <commit_msg>Set SRID the new way<commit_after># -*- coding: utf-8 -*- from django.contrib.gis.geos import Polygon class ResourceLink(object): def __init__(self, type, href): self.type = type self.href = href class Resource(object): def __init__(self, id, description, author, links, title, created_at, updated_at, geom): self.id = id self.title = title self.description = description self.author = author self.links = links self.created_at = created_at self.updated_at = updated_at self.geom = geom class ResourceList(object): def __init__(self, api_url, catalog, count, results): self.api_url = api_url self.catalog = catalog self.count = count self.results = results class BBox(object): """ Bounding box from incoming search API request. """ def __init__(self, xmin, ymin, xmax, ymax): self.xmin = xmin self.xmax = xmax self.ymin = ymin self.ymax = ymax def area(self): polygon = Polygon.from_bbox(( self.xmin, self.ymin, self.xmax, self.ymax)) polygon.srid = 4326 return polygon.transform(5070, clone=True).area
9d3d2beab6ec06ce13126b818029258a66f450f6
babelfish/__init__.py
babelfish/__init__.py
# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, Script
# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, SCRIPT_MATRIX, Script
Add SCRIPT_MATRIX to babelfish module imports
Add SCRIPT_MATRIX to babelfish module imports
Python
bsd-3-clause
Diaoul/babelfish
# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, Script Add SCRIPT_MATRIX to babelfish module imports
# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, SCRIPT_MATRIX, Script
<commit_before># -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, Script <commit_msg>Add SCRIPT_MATRIX to babelfish module imports<commit_after>
# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, SCRIPT_MATRIX, Script
# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, Script Add SCRIPT_MATRIX to babelfish module imports# -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, SCRIPT_MATRIX, Script
<commit_before># -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, Script <commit_msg>Add SCRIPT_MATRIX to babelfish module imports<commit_after># -*- coding: utf-8 -*- # # Copyright (c) 2013 the BabelFish authors. All rights reserved. # Use of this source code is governed by the 3-clause BSD license # that can be found in the LICENSE file. # __title__ = 'babelfish' __version__ = '0.4.1' __author__ = 'Antoine Bertin' __license__ = 'BSD' __copyright__ = 'Copyright 2013 the BabelFish authors' from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter, CountryReverseConverter) from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language from .script import SCRIPTS, SCRIPT_MATRIX, Script
2ad21d67ccde2e25ea5c6d64cdee36dbc6425cbc
construct/tests/test_mapping.py
construct/tests/test_mapping.py
import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00")
import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_parse_default(self): flag = Flag("flag") self.assertFalse(flag.parse("\x02")) def test_parse_default_true(self): flag = Flag("flag", default=True) self.assertTrue(flag.parse("\x02")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00")
Add a couple more Flag tests.
tests: Add a couple more Flag tests.
Python
mit
riggs/construct,mosquito/construct,gkonstantyno/construct,MostAwesomeDude/construct,0000-bigtree/construct,riggs/construct,mosquito/construct,0000-bigtree/construct,gkonstantyno/construct,MostAwesomeDude/construct
import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00") tests: Add a couple more Flag tests.
import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_parse_default(self): flag = Flag("flag") self.assertFalse(flag.parse("\x02")) def test_parse_default_true(self): flag = Flag("flag", default=True) self.assertTrue(flag.parse("\x02")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00")
<commit_before>import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00") <commit_msg>tests: Add a couple more Flag tests.<commit_after>
import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_parse_default(self): flag = Flag("flag") self.assertFalse(flag.parse("\x02")) def test_parse_default_true(self): flag = Flag("flag", default=True) self.assertTrue(flag.parse("\x02")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00")
import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00") tests: Add a couple more Flag tests.import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_parse_default(self): flag = Flag("flag") self.assertFalse(flag.parse("\x02")) def test_parse_default_true(self): flag = Flag("flag", default=True) self.assertTrue(flag.parse("\x02")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00")
<commit_before>import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00") <commit_msg>tests: Add a couple more Flag tests.<commit_after>import unittest from construct import Flag class TestFlag(unittest.TestCase): def test_parse(self): flag = Flag("flag") self.assertTrue(flag.parse("\x01")) def test_parse_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertFalse(flag.parse("\x01")) def test_parse_default(self): flag = Flag("flag") self.assertFalse(flag.parse("\x02")) def test_parse_default_true(self): flag = Flag("flag", default=True) self.assertTrue(flag.parse("\x02")) def test_build(self): flag = Flag("flag") self.assertEqual(flag.build(True), "\x01") def test_build_flipped(self): flag = Flag("flag", truth=0, falsehood=1) self.assertEqual(flag.build(True), "\x00")
b2dd21b2240eec28881d6162f9e35b16df906219
arris_cli.py
arris_cli.py
#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")
#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")
Tweak formatting of argparse section to minimize lines extending past 80 chars.
Tweak formatting of argparse section to minimize lines extending past 80 chars.
Python
mit
wolrah/arris_stats
#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")Tweak formatting of argparse section to minimize lines extending past 80 chars.
#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")
<commit_before>#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")<commit_msg>Tweak formatting of argparse section to minimize lines extending past 80 chars.<commit_after>
#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")
#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")Tweak formatting of argparse section to minimize lines extending past 80 chars.#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")
<commit_before>#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")<commit_msg>Tweak formatting of argparse section to minimize lines extending past 80 chars.<commit_after>#!/usr/bin/env python # CLI frontend to Arris modem stat scraper library arris_scraper.py import argparse import arris_scraper import json import pprint default_url = 'http://192.168.100.1/cgi-bin/status_cgi' parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.') parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format') parser.add_argument('-u', '--url', default=default_url, help='url of modem status page') args = parser.parse_args() if args.output_format == 'ascii': print("ASCII output not yet implemented") elif args.output_format == 'json': result = arris_scraper.get_status(args.url) print(json.dumps(result)) elif args.output_format == 'pprint': result = arris_scraper.get_status(args.url) pprint.pprint(result) else: print("How in the world did you get here?")
4d7dff1c335a49d13d420f3c62b1a2d2382351dd
trajprocess/tests/utils.py
trajprocess/tests/utils.py
"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd)
"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename # command for generating reference data: # gmx mdrun -nsteps 5000 -s frame0.tpr -cpi -noappend # # Do that three times. def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd)
Add note about how to generate trajectories
Add note about how to generate trajectories
Python
mit
mpharrigan/trajprocess,mpharrigan/trajprocess
"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd) Add note about how to generate trajectories
"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename # command for generating reference data: # gmx mdrun -nsteps 5000 -s frame0.tpr -cpi -noappend # # Do that three times. def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd)
<commit_before>"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd) <commit_msg>Add note about how to generate trajectories<commit_after>
"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename # command for generating reference data: # gmx mdrun -nsteps 5000 -s frame0.tpr -cpi -noappend # # Do that three times. def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd)
"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd) Add note about how to generate trajectories"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename # command for generating reference data: # gmx mdrun -nsteps 5000 -s frame0.tpr -cpi -noappend # # Do that three times. def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd)
<commit_before>"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd) <commit_msg>Add note about how to generate trajectories<commit_after>"""Tools for setting up a fake directory structure for processing.""" from tempfile import mkdtemp import os import shutil import json from pkg_resources import resource_filename # command for generating reference data: # gmx mdrun -nsteps 5000 -s frame0.tpr -cpi -noappend # # Do that three times. def write_run_clone(proj, run, clone, gens=None): if gens is None: gens = [0, 1] rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run, clone=clone) os.makedirs(rc, exist_ok=True) tpr_fn = resource_filename(__name__, 'topol.tpr') shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc)) for gen in gens: shutil.copy(resource_filename(__name__, "traj_comp.part{:04d}.xtc".format( gen + 1)), "{}/frame{}.xtc".format(rc, gen)) def generate_project(): global wd wd = mkdtemp() os.chdir(wd) write_run_clone(1234, 5, 7) write_run_clone(1234, 6, 0) with open('structs-p1234.json', 'w') as f: json.dump({ 5: {'struct': 'stru1', 'fext': 'pdb'}, 6: {'struct': 'stru2', 'fext': 'pdb'} }, f) def cleanup(): shutil.rmtree(wd)
a465922385ef12cf01f2a8a75928470c33f39569
snakepit/directory.py
snakepit/directory.py
""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, nullable=False, index=True, ), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new
""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, primary_key=True), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new
Make hive_primary_DIMENSION.id primary key, so autoincrement works.
Make hive_primary_DIMENSION.id primary key, so autoincrement works.
Python
mit
tv42/snakepit,tv42/snakepit
""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, nullable=False, index=True, ), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new Make hive_primary_DIMENSION.id primary key, so autoincrement works.
""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, primary_key=True), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new
<commit_before>""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, nullable=False, index=True, ), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new <commit_msg>Make hive_primary_DIMENSION.id primary key, so autoincrement works.<commit_after>
""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, primary_key=True), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new
""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, nullable=False, index=True, ), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new Make hive_primary_DIMENSION.id primary key, so autoincrement works.""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, primary_key=True), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new
<commit_before>""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, nullable=False, index=True, ), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new <commit_msg>Make hive_primary_DIMENSION.id primary key, so autoincrement works.<commit_after>""" HiveDB client access via SQLAlchemy """ import sqlalchemy as sq metadata = sq.MetaData() hive_primary = sq.Table( 'hive_primary_DIMENSION', metadata, sq.Column('id', sq.Integer, primary_key=True), sq.Column('node', sq.SmallInteger, nullable=False, index=True, ), sq.Column('secondary_index_count', sq.Integer, nullable=False), # Hive_ERD.png says "date", but I think you want time too sq.Column('last_updated', sq.DateTime, nullable=False, index=True, ), sq.Column('read_only', sq.Boolean, nullable=False, default=False), sq.UniqueConstraint('id', 'node'), ) hive_secondary = sq.Table( 'hive_secondary_RESOURCE_COLUMN', metadata, sq.Column('id', sq.Integer, nullable=True, index=True, ), sq.Column('pkey', sq.Integer, sq.ForeignKey("hive_primary_TODO.id"), nullable=False, index=True, ), ) def dynamic_table(table, metadata, name): """ Access C{table} under new C{metadata} with new C{name}. """ new = metadata.tables.get(name, None) if new is not None: return new new = sq.Table( name, metadata, *[c.copy() for c in table.columns]) return new
6f22416734525376b0a2143972b9546df3164751
databaker/databaker_nbconvert.py
databaker/databaker_nbconvert.py
#!/usr/bin/env python import os import subprocess import sys def main(argv): if len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:])
#!/usr/bin/env python import os import subprocess import sys def main(argv=None): if argv is None or len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:])
Set default argv to None
Set default argv to None
Python
agpl-3.0
scraperwiki/databaker,scraperwiki/databaker
#!/usr/bin/env python import os import subprocess import sys def main(argv): if len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:]) Set default argv to None
#!/usr/bin/env python import os import subprocess import sys def main(argv=None): if argv is None or len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:])
<commit_before>#!/usr/bin/env python import os import subprocess import sys def main(argv): if len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:]) <commit_msg>Set default argv to None<commit_after>
#!/usr/bin/env python import os import subprocess import sys def main(argv=None): if argv is None or len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:])
#!/usr/bin/env python import os import subprocess import sys def main(argv): if len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:]) Set default argv to None#!/usr/bin/env python import os import subprocess import sys def main(argv=None): if argv is None or len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:])
<commit_before>#!/usr/bin/env python import os import subprocess import sys def main(argv): if len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:]) <commit_msg>Set default argv to None<commit_after>#!/usr/bin/env python import os import subprocess import sys def main(argv=None): if argv is None or len(argv) == 0 or len(argv) > 2: print("Usage: databaker_process.py <notebook_file> <input_file>") print() print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE") print("in the notebook.") print("The input file should also be in the same directory as the") print("notebook.") sys.exit(1) process_env = os.environ.copy() if len(argv) == 2: process_env['DATABAKER_INPUT_FILE'] = argv[1] # TODO get custom templates working; according to this: # https://github.com/jupyter/nbconvert/issues/391 # they should work, but I get TemplateNotFound when using absolute path # for template. cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]] print("Running:", ' '.join(cmd_line)) subprocess.call(args=cmd_line, env=process_env) if __name__ == '__main__': main(sys.argv[1:])
34d1bbc36f7d5c66000eec0d6debfd3ede74366f
bottle_auth/custom.py
bottle_auth/custom.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session return {}
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session self.redirect(environ)
Fix Custom class: if user doesn't exist in beaker.session, redirect to login page
Fix Custom class: if user doesn't exist in beaker.session, redirect to login page
Python
mit
avelino/bottle-auth
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session return {} Fix Custom class: if user doesn't exist in beaker.session, redirect to login page
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session self.redirect(environ)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session return {} <commit_msg>Fix Custom class: if user doesn't exist in beaker.session, redirect to login page<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session self.redirect(environ)
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session return {} Fix Custom class: if user doesn't exist in beaker.session, redirect to login page#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session self.redirect(environ)
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session return {} <commit_msg>Fix Custom class: if user doesn't exist in beaker.session, redirect to login page<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from bottle import redirect log = logging.getLogger('bottle-auth.custom') class Custom(object): def __init__(self, login_url="/login", callback_url="http://127.0.0.1:8000"): self.login_url = login_url self.callback_url = callback_url def redirect(self, environ): return redirect(self.login_url) def get_user(self, environ): session = environ.get('beaker.session') if session.get("username", None) and session.get("apikey", None): return session self.redirect(environ)
66edf9f04c1b23681fae4234a8b297868e66b7aa
osmaxx-py/osmaxx/excerptexport/models/excerpt.py
osmaxx-py/osmaxx/excerptexport/models/excerpt.py
from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name'), blank=False) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user)
from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name')) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user)
Remove value which is already default
Remove value which is already default
Python
mit
geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx
from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name'), blank=False) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user) Remove value which is already default
from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name')) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user)
<commit_before>from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name'), blank=False) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user) <commit_msg>Remove value which is already default<commit_after>
from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name')) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user)
from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name'), blank=False) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user) Remove value which is already defaultfrom django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name')) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user)
<commit_before>from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name'), blank=False) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user) <commit_msg>Remove value which is already default<commit_after>from django.db import models from django.contrib.auth.models import User from django.utils.translation import gettext_lazy as _ class Excerpt(models.Model): name = models.CharField(max_length=128, verbose_name=_('name')) is_public = models.BooleanField(default=False, verbose_name=_('is public')) is_active = models.BooleanField(default=True, verbose_name=_('is active')) owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner')) bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry')) @property def type_of_geometry(self): return self.bounding_geometry.type_of_geometry @property def extent(self): return self.bounding_geometry.extent def __str__(self): return self.name def _active_excerpts(): return Excerpt.objects.filter(is_active=True).filter( bounding_geometry__bboxboundinggeometry__isnull=False ) def private_user_excerpts(user): return _active_excerpts().filter(is_public=False, owner=user) def public_user_excerpts(user): return _active_excerpts().filter(is_public=True, owner=user) def other_users_public_excerpts(user): return _active_excerpts().filter(is_public=True).exclude(owner=user)
e0a9b97792f48efdfb166b785a8ba1fd2448a171
cref/structure/__init__.py
cref/structure/__init__.py
# import porter_paleale def predict_secondary_structure(sequence): """ Predict the secondary structure of a given sequence :param sequence: Amino acid sequence :return: Secondary structure prediction as a string H = helix E = Strand C = coil """ # return porter_paleale.predict(sequence) return "C" * len(sequence) def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
Add skeleton for secondary structure and write pdb module
Add skeleton for secondary structure and write pdb module
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
Add skeleton for secondary structure and write pdb module
# import porter_paleale def predict_secondary_structure(sequence): """ Predict the secondary structure of a given sequence :param sequence: Amino acid sequence :return: Secondary structure prediction as a string H = helix E = Strand C = coil """ # return porter_paleale.predict(sequence) return "C" * len(sequence) def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
<commit_before><commit_msg>Add skeleton for secondary structure and write pdb module<commit_after>
# import porter_paleale def predict_secondary_structure(sequence): """ Predict the secondary structure of a given sequence :param sequence: Amino acid sequence :return: Secondary structure prediction as a string H = helix E = Strand C = coil """ # return porter_paleale.predict(sequence) return "C" * len(sequence) def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
Add skeleton for secondary structure and write pdb module# import porter_paleale def predict_secondary_structure(sequence): """ Predict the secondary structure of a given sequence :param sequence: Amino acid sequence :return: Secondary structure prediction as a string H = helix E = Strand C = coil """ # return porter_paleale.predict(sequence) return "C" * len(sequence) def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
<commit_before><commit_msg>Add skeleton for secondary structure and write pdb module<commit_after># import porter_paleale def predict_secondary_structure(sequence): """ Predict the secondary structure of a given sequence :param sequence: Amino acid sequence :return: Secondary structure prediction as a string H = helix E = Strand C = coil """ # return porter_paleale.predict(sequence) return "C" * len(sequence) def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
cddb0309eaa0c31569f791b8b9f2c8666b65b8b4
openrcv/test/test_models.py
openrcv/test/test_models.py
from openrcv.models import ContestInfo from openrcv.utiltest.helpers import UnitCase class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4))
from textwrap import dedent from openrcv.models import BallotsResource, BallotStreamResource, ContestInfo from openrcv.utils import StringInfo from openrcv.utiltest.helpers import UnitCase class BallotsResourceTest(UnitCase): def test(self): ballots = [1, 3, 2] ballot_resource = BallotsResource(ballots) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, [1, 3, 2]) class BallotStreamResourceTest(UnitCase): def test(self): ballot_info = StringInfo("2 1 2\n3 1\n") ballot_resource = BallotStreamResource(ballot_info) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2\n', '3 1\n']) def test_parse_default(self): ballot_info = StringInfo("2 1 2\n3 1\n") parse = lambda line: line.strip() ballot_resource = BallotStreamResource(ballot_info, parse=parse) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2', '3 1']) class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4))
Add tests for ballots resource classes.
Add tests for ballots resource classes.
Python
mit
cjerdonek/open-rcv,cjerdonek/open-rcv
from openrcv.models import ContestInfo from openrcv.utiltest.helpers import UnitCase class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4)) Add tests for ballots resource classes.
from textwrap import dedent from openrcv.models import BallotsResource, BallotStreamResource, ContestInfo from openrcv.utils import StringInfo from openrcv.utiltest.helpers import UnitCase class BallotsResourceTest(UnitCase): def test(self): ballots = [1, 3, 2] ballot_resource = BallotsResource(ballots) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, [1, 3, 2]) class BallotStreamResourceTest(UnitCase): def test(self): ballot_info = StringInfo("2 1 2\n3 1\n") ballot_resource = BallotStreamResource(ballot_info) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2\n', '3 1\n']) def test_parse_default(self): ballot_info = StringInfo("2 1 2\n3 1\n") parse = lambda line: line.strip() ballot_resource = BallotStreamResource(ballot_info, parse=parse) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2', '3 1']) class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4))
<commit_before> from openrcv.models import ContestInfo from openrcv.utiltest.helpers import UnitCase class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4)) <commit_msg>Add tests for ballots resource classes.<commit_after>
from textwrap import dedent from openrcv.models import BallotsResource, BallotStreamResource, ContestInfo from openrcv.utils import StringInfo from openrcv.utiltest.helpers import UnitCase class BallotsResourceTest(UnitCase): def test(self): ballots = [1, 3, 2] ballot_resource = BallotsResource(ballots) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, [1, 3, 2]) class BallotStreamResourceTest(UnitCase): def test(self): ballot_info = StringInfo("2 1 2\n3 1\n") ballot_resource = BallotStreamResource(ballot_info) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2\n', '3 1\n']) def test_parse_default(self): ballot_info = StringInfo("2 1 2\n3 1\n") parse = lambda line: line.strip() ballot_resource = BallotStreamResource(ballot_info, parse=parse) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2', '3 1']) class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4))
from openrcv.models import ContestInfo from openrcv.utiltest.helpers import UnitCase class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4)) Add tests for ballots resource classes. from textwrap import dedent from openrcv.models import BallotsResource, BallotStreamResource, ContestInfo from openrcv.utils import StringInfo from openrcv.utiltest.helpers import UnitCase class BallotsResourceTest(UnitCase): def test(self): ballots = [1, 3, 2] ballot_resource = BallotsResource(ballots) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, [1, 3, 2]) class BallotStreamResourceTest(UnitCase): def test(self): ballot_info = StringInfo("2 1 2\n3 1\n") ballot_resource = BallotStreamResource(ballot_info) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2\n', '3 1\n']) def test_parse_default(self): ballot_info = StringInfo("2 1 2\n3 1\n") parse = lambda line: line.strip() ballot_resource = BallotStreamResource(ballot_info, parse=parse) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2', '3 1']) class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4))
<commit_before> from openrcv.models import ContestInfo from openrcv.utiltest.helpers import UnitCase class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4)) <commit_msg>Add tests for ballots resource classes.<commit_after> from textwrap import dedent from openrcv.models import BallotsResource, BallotStreamResource, ContestInfo from openrcv.utils import StringInfo from openrcv.utiltest.helpers import UnitCase class BallotsResourceTest(UnitCase): def test(self): ballots = [1, 3, 2] ballot_resource = BallotsResource(ballots) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, [1, 3, 2]) class BallotStreamResourceTest(UnitCase): def test(self): ballot_info = StringInfo("2 1 2\n3 1\n") ballot_resource = BallotStreamResource(ballot_info) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2\n', '3 1\n']) def test_parse_default(self): ballot_info = StringInfo("2 1 2\n3 1\n") parse = lambda line: line.strip() ballot_resource = BallotStreamResource(ballot_info, parse=parse) with ballot_resource() as ballots: ballots = list(ballots) self.assertEqual(ballots, ['2 1 2', '3 1']) class ContestInfoTest(UnitCase): def test_get_candidates(self): contest = ContestInfo() contest.candidates = ["Alice", "Bob", "Carl"] self.assertEqual(contest.get_candidates(), range(1, 4))
1a65ebc4a36da37840b3bd74666bf7f607ed190b
oshot/context_processors.py
oshot/context_processors.py
from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context
from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} try: kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) except AttributeError: pass if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context
Clean bug with static file serving
Clean bug with static file serving
Python
bsd-3-clause
hasadna/open-shot,hasadna/open-shot,hasadna/open-shot
from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context Clean bug with static file serving
from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} try: kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) except AttributeError: pass if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context
<commit_before>from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context <commit_msg>Clean bug with static file serving<commit_after>
from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} try: kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) except AttributeError: pass if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context
from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context Clean bug with static file servingfrom django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} try: kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) except AttributeError: pass if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context
<commit_before>from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context <commit_msg>Clean bug with static file serving<commit_after>from django.contrib.auth.forms import AuthenticationForm from django.contrib.sites.models import get_current_site from django.conf import settings from haystack.forms import SearchForm from entities.models import Entity from oshot.forms import EntityChoiceForm def forms(request): context = {"search_form": SearchForm()} try: kwargs = request.resolver_match.kwargs if 'entity_slug' in kwargs: entity = Entity.objects.get(slug=kwargs['entity_slug']) initial = {'entity': entity.id} elif 'entity_id' in kwargs: entity = Entity.objects.get(id=kwargs['entity_id']) initial = {'entity': entity.id} else: initial = {} context['entity_form'] = EntityChoiceForm(initial=initial, auto_id=False) except AttributeError: pass if not request.user.is_authenticated(): context["login_form"] = AuthenticationForm() # TODO: remove context["site"] = get_current_site(request) context["ANALYTICS_ID"] = getattr(settings, 'ANALYTICS_ID', False) return context
771f429433d201463ab94439870d1bc803022722
nap/auth.py
nap/auth.py
from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator permit_logged_in = permit( lambda self, *args, **kwargs: self.request.user.is_authenticated() ) permit_staff = permit( lambda self, *args, **kwargs: self.request.user.is_staff ) def permit_groups(*groups): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups)
from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator # Helpers for people wanting to control response class def test_logged_in(self, *args, **kwargs): return self.request.user.is_authenticated() def test_staff(self, *args, **kwargs): return self.request.user.is_staff permit_logged_in = permit(test_logged_in) permit_staff = permit(test_staff) def permit_groups(*groups, response_class=http.Forbidden): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups, response_class=response_class)
Make it DRYer for people
Make it DRYer for people
Python
bsd-3-clause
limbera/django-nap
from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator permit_logged_in = permit( lambda self, *args, **kwargs: self.request.user.is_authenticated() ) permit_staff = permit( lambda self, *args, **kwargs: self.request.user.is_staff ) def permit_groups(*groups): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups) Make it DRYer for people
from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator # Helpers for people wanting to control response class def test_logged_in(self, *args, **kwargs): return self.request.user.is_authenticated() def test_staff(self, *args, **kwargs): return self.request.user.is_staff permit_logged_in = permit(test_logged_in) permit_staff = permit(test_staff) def permit_groups(*groups, response_class=http.Forbidden): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups, response_class=response_class)
<commit_before>from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator permit_logged_in = permit( lambda self, *args, **kwargs: self.request.user.is_authenticated() ) permit_staff = permit( lambda self, *args, **kwargs: self.request.user.is_staff ) def permit_groups(*groups): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups) <commit_msg>Make it DRYer for people<commit_after>
from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator # Helpers for people wanting to control response class def test_logged_in(self, *args, **kwargs): return self.request.user.is_authenticated() def test_staff(self, *args, **kwargs): return self.request.user.is_staff permit_logged_in = permit(test_logged_in) permit_staff = permit(test_staff) def permit_groups(*groups, response_class=http.Forbidden): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups, response_class=response_class)
from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator permit_logged_in = permit( lambda self, *args, **kwargs: self.request.user.is_authenticated() ) permit_staff = permit( lambda self, *args, **kwargs: self.request.user.is_staff ) def permit_groups(*groups): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups) Make it DRYer for peoplefrom __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator # Helpers for people wanting to control response class def test_logged_in(self, *args, **kwargs): return self.request.user.is_authenticated() def test_staff(self, *args, **kwargs): return self.request.user.is_staff permit_logged_in = permit(test_logged_in) permit_staff = permit(test_staff) def permit_groups(*groups, response_class=http.Forbidden): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups, response_class=response_class)
<commit_before>from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator permit_logged_in = permit( lambda self, *args, **kwargs: self.request.user.is_authenticated() ) permit_staff = permit( lambda self, *args, **kwargs: self.request.user.is_staff ) def permit_groups(*groups): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups) <commit_msg>Make it DRYer for people<commit_after>from __future__ import unicode_literals # Authentication and Authorisation from functools import wraps from . import http def permit(test_func, response_class=http.Forbidden): '''Decorate a handler to control access''' def decorator(view_func): @wraps(view_func) def _wrapped_view(self, *args, **kwargs): if test_func(self, *args, **kwargs): return view_func(self, *args, **kwargs) return response_class() return _wrapped_view return decorator # Helpers for people wanting to control response class def test_logged_in(self, *args, **kwargs): return self.request.user.is_authenticated() def test_staff(self, *args, **kwargs): return self.request.user.is_staff permit_logged_in = permit(test_logged_in) permit_staff = permit(test_staff) def permit_groups(*groups, response_class=http.Forbidden): def in_groups(self, *args, **kwargs): return self.request.user.groups.filter(name__in=groups).exists() return permit(in_groups, response_class=response_class)
a3a9da03da0691af53526096992a56fcaeecb642
py/test/selenium/webdriver/common/proxy_tests.py
py/test/selenium/webdriver/common/proxy_tests.py
#!/usr/bin/python # Copyright 2012 Software Freedom Conservancy. # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from selenium.webdriver.common.proxy import Proxy class ProxyTests(unittest.TestCase): def testCanAddToDesiredCapabilities(self): desired_capabilities = {} proxy = Proxy() proxy.http_proxy = 'some.url:1234' proxy.add_to_capabilities(desired_capabilities) expected_capabilities = { 'proxy': { 'proxyType': 'manual', 'httpProxy': 'some.url:1234' } } self.assertEqual(expected_capabilities, desired_capabilities)
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'MANUAL',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
Fix test as well :)
DanielWagnerHall: Fix test as well :) git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@17825 07704840-8298-11de-bf8c-fd130f914ac9
Python
apache-2.0
jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,yumingjuan/selenium,yumingjuan/selenium,yumingjuan/selenium,yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,yumingjuan/selenium
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'manual',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)

DanielWagnerHall: Fix test as well :)
git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@17825 07704840-8298-11de-bf8c-fd130f914ac9
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'MANUAL',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
<commit_before>#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'manual',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
<commit_msg>DanielWagnerHall: Fix test as well :)
git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@17825 07704840-8298-11de-bf8c-fd130f914ac9<commit_after>
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'MANUAL',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'manual',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)

DanielWagnerHall: Fix test as well :)
git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@17825 07704840-8298-11de-bf8c-fd130f914ac9

#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'MANUAL',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
<commit_before>#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'manual',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
<commit_msg>DanielWagnerHall: Fix test as well :)
git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@17825 07704840-8298-11de-bf8c-fd130f914ac9<commit_after>#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
from selenium.webdriver.common.proxy import Proxy


class ProxyTests(unittest.TestCase):

    def testCanAddToDesiredCapabilities(self):
        desired_capabilities = {}
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        proxy.add_to_capabilities(desired_capabilities)

        expected_capabilities = {
            'proxy': {
                'proxyType': 'MANUAL',
                'httpProxy': 'some.url:1234'
            }
        }
        self.assertEqual(expected_capabilities, desired_capabilities)
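For context, a sketch of the code path this test guards: building a Proxy, folding it into a capabilities dict, and handing that to a Remote driver. The hub URL is a placeholder and the constructor reflects the selenium 2-era API; neither detail comes from the record itself.

# Sketch of the API path the test above exercises; the hub URL is a
# placeholder assumption, not taken from the recorded commit.
from selenium import webdriver
from selenium.webdriver.common.proxy import Proxy

capabilities = dict(webdriver.DesiredCapabilities.FIREFOX)
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(capabilities)
# after the fix, capabilities['proxy']['proxyType'] == 'MANUAL',
# the spelling the remote end expects over the wire
driver = webdriver.Remote(
    command_executor='http://127.0.0.1:4444/wd/hub',
    desired_capabilities=capabilities,
)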