commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ddbf22b6e4d19c2b0c47543d6f4d7fe8fc704483 | errors.py | errors.py | """Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
| """Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
__str__ = __repr__
| Make __str__ = to repr | Make __str__ = to repr
| Python | bsd-3-clause | mmattice/TwistedSNMP | """Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
Make __str__ = to repr | """Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
__str__ = __repr__
| <commit_before>"""Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
<commit_msg>Make __str__ = to repr<commit_after> | """Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
__str__ = __repr__
| """Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
Make __str__ = to repr"""Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
__str__ = __repr__
| <commit_before>"""Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
<commit_msg>Make __str__ = to repr<commit_after>"""Errors specific to TwistedSNMP"""
noError = 0
tooBig = 1 # Response message would have been too large
noSuchName = 2 #There is no such variable name in this MIB
badValue = 3 # The value given has the wrong type or length
class OIDNameError( NameError ):
"""An OID was specified which is not defined in namespace"""
def __init__( self, oid, errorIndex=-1 , errorCode=noSuchName, message=""):
"""Initialise the OIDNameError"""
self.oid, self.errorIndex, self.errorCode, self.message = oid, errorIndex, errorCode, message
def __repr__( self ):
"""Represent the OIDNameError as a string"""
return """%s( %r, %s, %s, %r )"""%(
self.__class__.__name__,
self.oid,
self.errorIndex,
self.errorCode,
self.message,
)
__str__ = __repr__
|
73fad83b4c1d295611de23b300b67d80a39c9a13 | bot/api/call/call.py | bot/api/call/call.py | from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
return self.__handle_api_error(e, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
| from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.exceptions import ApiExceptionFactory
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
exception = ApiExceptionFactory.from_telegram_bot_api_exception(e)
return self.__handle_api_error(exception, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
| Convert TelegramBotApiException to a ApiException in ApiCall | Convert TelegramBotApiException to a ApiException in ApiCall
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
return self.__handle_api_error(e, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
Convert TelegramBotApiException to a ApiException in ApiCall | from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.exceptions import ApiExceptionFactory
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
exception = ApiExceptionFactory.from_telegram_bot_api_exception(e)
return self.__handle_api_error(exception, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
| <commit_before>from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
return self.__handle_api_error(e, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
<commit_msg>Convert TelegramBotApiException to a ApiException in ApiCall<commit_after> | from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.exceptions import ApiExceptionFactory
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
exception = ApiExceptionFactory.from_telegram_bot_api_exception(e)
return self.__handle_api_error(exception, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
| from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
return self.__handle_api_error(e, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
Convert TelegramBotApiException to a ApiException in ApiCallfrom bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.exceptions import ApiExceptionFactory
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
exception = ApiExceptionFactory.from_telegram_bot_api_exception(e)
return self.__handle_api_error(exception, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
| <commit_before>from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
return self.__handle_api_error(e, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
<commit_msg>Convert TelegramBotApiException to a ApiException in ApiCall<commit_after>from bot.api.call.params import ApiCallParams
from bot.api.domain import ApiObject
from bot.api.exceptions import ApiExceptionFactory
from bot.api.telegram import TelegramBotApiException
from bot.multithreading.work import Work
class ApiCall:
def __init__(self, api_func: callable, name: str):
self.api_func = api_func
self.name = name
def call(self, params: ApiCallParams):
api_call = lambda: self.__do_api_call_and_handle_error(params)
scheduler = params.scheduler
if scheduler:
scheduler(Work(api_call, "async_api_call:" + self.name))
else:
return api_call()
def __do_api_call_and_handle_error(self, params: ApiCallParams):
try:
return self.__do_api_call(params)
except TelegramBotApiException as e:
exception = ApiExceptionFactory.from_telegram_bot_api_exception(e)
return self.__handle_api_error(exception, params)
def __do_api_call(self, params: ApiCallParams):
return ApiObject.wrap_api_object(self.api_func(**params.send))
@staticmethod
def __handle_api_error(e, params: ApiCallParams):
error_callback = params.error_callback
if callable(error_callback):
return error_callback(e)
else:
raise e
|
fa2e48566bf532a2c72f9863444f3c7cff23a1c4 | github/commands/open_file_on_remote.py | github/commands/open_file_on_remote.py | from sublime_plugin import TextCommand
from ...common.file_and_repo import FileAndRepo
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, FileAndRepo):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
| from sublime_plugin import TextCommand
from ...core.base_command import BaseCommand
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, BaseCommand):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, edit, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
| Fix regression where unable to open file on remote. | Fix regression where unable to open file on remote.
| Python | mit | divmain/GitSavvy,ddevlin/GitSavvy,ddevlin/GitSavvy,ypersyntelykos/GitSavvy,stoivo/GitSavvy,theiviaxx/GitSavvy,ralic/GitSavvy,dreki/GitSavvy,ralic/GitSavvy,jmanuel1/GitSavvy,divmain/GitSavvy,stoivo/GitSavvy,dreki/GitSavvy,dvcrn/GitSavvy,asfaltboy/GitSavvy,stoivo/GitSavvy,theiviaxx/GitSavvy,asfaltboy/GitSavvy,ddevlin/GitSavvy,jmanuel1/GitSavvy,ypersyntelykos/GitSavvy,asfaltboy/GitSavvy,divmain/GitSavvy,dvcrn/GitSavvy | from sublime_plugin import TextCommand
from ...common.file_and_repo import FileAndRepo
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, FileAndRepo):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
Fix regression where unable to open file on remote. | from sublime_plugin import TextCommand
from ...core.base_command import BaseCommand
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, BaseCommand):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, edit, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
| <commit_before>from sublime_plugin import TextCommand
from ...common.file_and_repo import FileAndRepo
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, FileAndRepo):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
<commit_msg>Fix regression where unable to open file on remote.<commit_after> | from sublime_plugin import TextCommand
from ...core.base_command import BaseCommand
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, BaseCommand):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, edit, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
| from sublime_plugin import TextCommand
from ...common.file_and_repo import FileAndRepo
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, FileAndRepo):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
Fix regression where unable to open file on remote.from sublime_plugin import TextCommand
from ...core.base_command import BaseCommand
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, BaseCommand):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, edit, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
| <commit_before>from sublime_plugin import TextCommand
from ...common.file_and_repo import FileAndRepo
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, FileAndRepo):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
<commit_msg>Fix regression where unable to open file on remote.<commit_after>from sublime_plugin import TextCommand
from ...core.base_command import BaseCommand
from ..github import open_file_in_browser
class GsOpenFileOnRemoteCommand(TextCommand, BaseCommand):
"""
Open a new browser window to the web-version of the currently opened
(or specified) file. If `preselect` is `True`, include the selected
lines in the request.
At present, this only supports github.com and GitHub enterprise.
"""
def run(self, edit, preselect=False, fpath=None):
fpath = fpath or self.get_rel_path()
start_line = None
end_line = None
if preselect:
selections = self.view.sel()
if len(selections) >= 1:
first_selection = selections[0]
last_selection = selections[-1]
# Git lines are 1-indexed; Sublime rows are 0-indexed.
start_line = self.view.rowcol(first_selection.begin())[0] + 1
end_line = self.view.rowcol(last_selection.end())[0] + 1
default_name, default_remote_url = self.get_remotes().popitem(last=False)
open_file_in_browser(
fpath,
default_remote_url,
self.get_commit_hash_for_head(),
start_line=start_line,
end_line=end_line
)
|
ea875b1cecd154400381969c7c1b165dccd77db8 | httpony/application.py | httpony/application.py | from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
| from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
# The WSGI server puts content length and type in the environment
# even when not provided with the request. Drop them if they are empty.
if environ.get('CONTENT_LENGTH') == '':
del environ['CONTENT_LENGTH']
if environ.get('CONTENT_TYPE') == '':
del environ['CONTENT_TYPE']
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
| Delete CONTENT_LENGTH and CONTENT_TYPE if empty. | Delete CONTENT_LENGTH and CONTENT_TYPE if empty.
For some reason, the WSGI server puts these things in the
environment even when they were not in the request.
Their presence messes up the output in those cases.
| Python | bsd-2-clause | mblayman/httpony | from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
Delete CONTENT_LENGTH and CONTENT_TYPE if empty.
For some reason, the WSGI server puts these things in the
environment even when they were not in the request.
Their presence messes up the output in those cases. | from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
# The WSGI server puts content length and type in the environment
# even when not provided with the request. Drop them if they are empty.
if environ.get('CONTENT_LENGTH') == '':
del environ['CONTENT_LENGTH']
if environ.get('CONTENT_TYPE') == '':
del environ['CONTENT_TYPE']
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
| <commit_before>from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
<commit_msg>Delete CONTENT_LENGTH and CONTENT_TYPE if empty.
For some reason, the WSGI server puts these things in the
environment even when they were not in the request.
Their presence messes up the output in those cases.<commit_after> | from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
# The WSGI server puts content length and type in the environment
# even when not provided with the request. Drop them if they are empty.
if environ.get('CONTENT_LENGTH') == '':
del environ['CONTENT_LENGTH']
if environ.get('CONTENT_TYPE') == '':
del environ['CONTENT_TYPE']
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
| from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
Delete CONTENT_LENGTH and CONTENT_TYPE if empty.
For some reason, the WSGI server puts these things in the
environment even when they were not in the request.
Their presence messes up the output in those cases.from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
# The WSGI server puts content length and type in the environment
# even when not provided with the request. Drop them if they are empty.
if environ.get('CONTENT_LENGTH') == '':
del environ['CONTENT_LENGTH']
if environ.get('CONTENT_TYPE') == '':
del environ['CONTENT_TYPE']
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
| <commit_before>from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
<commit_msg>Delete CONTENT_LENGTH and CONTENT_TYPE if empty.
For some reason, the WSGI server puts these things in the
environment even when they were not in the request.
Their presence messes up the output in those cases.<commit_after>from __future__ import print_function
from httpie.cli import parser
from httpie.context import Environment
from httpie.output import streams
from requests.models import Request
from werkzeug.wrappers import Response
from werkzeug.wrappers import Request as WerkzeugRequest
from httpony import __version__
def make_app():
"""Make a WSGI app that has all the HTTPie pieces baked in."""
env = Environment()
args = parser.parse_args(args=['/'], env=env)
args.output_options = 'HB' # Output only requests.
server = 'HTTPony/{0}'.format(__version__)
def application(environ, start_response):
# The WSGI server puts content length and type in the environment
# even when not provided with the request. Drop them if they are empty.
if environ.get('CONTENT_LENGTH') == '':
del environ['CONTENT_LENGTH']
if environ.get('CONTENT_TYPE') == '':
del environ['CONTENT_TYPE']
wrequest = WerkzeugRequest(environ)
data = wrequest.get_data()
request = Request(
method=wrequest.method,
url=wrequest.url,
headers=wrequest.headers,
data=data,
)
prepared = request.prepare()
stream = streams.build_output_stream(args, env, prepared, response=None)
streams.write(stream, env.stdout, env.stdout_isatty)
# When there is data in the request, give the next one breathing room.
if data:
print("\n", file=env.stdout)
# Make dreams come true.
response = Response(headers={'Server': server})
return response(environ, start_response)
return application
|
1f105b7ecd6770ac0704329bda3f149c05878da3 | tests/test_utilities/test_in2csv.py | tests/test_utilities/test_in2csv.py | #!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
from csvkit.utilities.csvstat import CSVStat
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
| #!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
| Remove extraneous import in test. | Remove extraneous import in test.
| Python | mit | themiurgo/csvkit,barentsen/csvkit,matterker/csvkit,dannguyen/csvkit,tlevine/csvkit,wjr1985/csvkit,doganmeh/csvkit,bradparks/csvkit__query_join_filter_CSV_cli,cypreess/csvkit,KarrieK/csvkit,archaeogeek/csvkit,Jobava/csvkit,jpalvarezf/csvkit,unpingco/csvkit,nriyer/csvkit,arowla/csvkit,haginara/csvkit,Tabea-K/csvkit,snuggles08/csvkit,metasoarous/csvkit,moradology/csvkit,wireservice/csvkit,kyeoh/csvkit,reubano/csvkit,aequitas/csvkit,bmispelon/csvkit,gepuro/csvkit,onyxfish/csvkit,elcritch/csvkit | #!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
from csvkit.utilities.csvstat import CSVStat
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
Remove extraneous import in test. | #!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
| <commit_before>#!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
from csvkit.utilities.csvstat import CSVStat
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
<commit_msg>Remove extraneous import in test.<commit_after> | #!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
| #!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
from csvkit.utilities.csvstat import CSVStat
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
Remove extraneous import in test.#!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
| <commit_before>#!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
from csvkit.utilities.csvstat import CSVStat
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
<commit_msg>Remove extraneous import in test.<commit_after>#!/usr/bin/env python
import unittest
import StringIO
from csvkit.utilities.in2csv import In2CSV
class TestIn2CSV(unittest.TestCase):
def test_convert_xls(self):
args = ['-f', 'xls', 'examples/test.xls']
output_file = StringIO.StringIO()
utility = In2CSV(args, output_file)
utility.main()
target_output = open('examples/testxls_converted.csv', 'r').read()
self.assertEqual(output_file.getvalue(), target_output)
|
6f265e37361b1447cf55c5d79cfe3ba6b6047b57 | tests/examples/helloworld/flows.py | tests/examples/helloworld/flows.py | from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
| from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
| Fix hello world sample flow | Fix hello world sample flow
| Python | agpl-3.0 | ribeiro-ucl/viewflow,ribeiro-ucl/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,viewflow/viewflow,codingjoe/viewflow,codingjoe/viewflow,pombredanne/viewflow,pombredanne/viewflow,viewflow/viewflow,viewflow/viewflow | from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
Fix hello world sample flow | from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
| <commit_before>from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
<commit_msg>Fix hello world sample flow<commit_after> | from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
| from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
Fix hello world sample flowfrom viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
| <commit_before>from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.send)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
<commit_msg>Fix hello world sample flow<commit_after>from viewflow import flow, lock, views as flow_views
from viewflow.base import this, Flow
from viewflow.site import viewsite
from .models import HelloWorldProcess
from .tasks import send_hello_world_request
class HelloWorldFlow(Flow):
"""
Hello world
This process demonstrates hello world approval request flow.
1. User with *helloworld.can_start_process* permission creates hello world request
2. Manager, who have *helloworld.can_approve_request* approves it
3. And if request was approved, background celery job sends it to the world
4. Elsewhere, request became cancelled
"""
process_cls = HelloWorldProcess
lock_impl = lock.select_for_update_lock
start = flow.Start(flow_views.StartProcessView, fields=['text']) \
.Permission(auto_create=True) \
.Next(this.approve)
approve = flow.View(flow_views.ProcessView, fields=['approved']) \
.Permission(auto_create=True) \
.Next(this.check_approve)
check_approve = flow.If(cond=lambda p: p.approved) \
.OnTrue(this.send) \
.OnFalse(this.end)
send = flow.Job(send_hello_world_request) \
.Next(this.end)
end = flow.End()
viewsite.register(HelloWorldFlow)
|
710291dd51c0a2950d616579f2442e6a81ee1670 | update.py | update.py | from datetime import datetime, timedelta
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9) for which in ('DTSTART', 'DTEND')]
# TODO: convert to PT
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
for data in ordered_event_data:
print str(data)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| from datetime import datetime, timedelta
from pprint import pformat
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9)
for which in ('DTSTART', 'DTEND')]
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
with open('giants_schedule.py', 'w') as outfile:
outfile.write('import datetime\n\n\n')
outfile.write('schedule = ')
outfile.write(pformat(tuple(ordered_event_data)))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| Write the formatted date data out as a module | Write the formatted date data out as a module
| Python | mit | markpasc/isthereagiantsgame | from datetime import datetime, timedelta
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9) for which in ('DTSTART', 'DTEND')]
# TODO: convert to PT
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
for data in ordered_event_data:
print str(data)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
Write the formatted date data out as a module | from datetime import datetime, timedelta
from pprint import pformat
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9)
for which in ('DTSTART', 'DTEND')]
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
with open('giants_schedule.py', 'w') as outfile:
outfile.write('import datetime\n\n\n')
outfile.write('schedule = ')
outfile.write(pformat(tuple(ordered_event_data)))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| <commit_before>from datetime import datetime, timedelta
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9) for which in ('DTSTART', 'DTEND')]
# TODO: convert to PT
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
for data in ordered_event_data:
print str(data)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
<commit_msg>Write the formatted date data out as a module<commit_after> | from datetime import datetime, timedelta
from pprint import pformat
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9)
for which in ('DTSTART', 'DTEND')]
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
with open('giants_schedule.py', 'w') as outfile:
outfile.write('import datetime\n\n\n')
outfile.write('schedule = ')
outfile.write(pformat(tuple(ordered_event_data)))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| from datetime import datetime, timedelta
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9) for which in ('DTSTART', 'DTEND')]
# TODO: convert to PT
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
for data in ordered_event_data:
print str(data)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
Write the formatted date data out as a modulefrom datetime import datetime, timedelta
from pprint import pformat
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9)
for which in ('DTSTART', 'DTEND')]
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
with open('giants_schedule.py', 'w') as outfile:
outfile.write('import datetime\n\n\n')
outfile.write('schedule = ')
outfile.write(pformat(tuple(ordered_event_data)))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| <commit_before>from datetime import datetime, timedelta
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9) for which in ('DTSTART', 'DTEND')]
# TODO: convert to PT
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
for data in ordered_event_data:
print str(data)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
<commit_msg>Write the formatted date data out as a module<commit_after>from datetime import datetime, timedelta
from pprint import pformat
import sys
from icalendar import Calendar
def data_for_vevent(ev):
start_date, end_date = [ev[which].dt.replace(tzinfo=None) + timedelta(hours=-9)
for which in ('DTSTART', 'DTEND')]
return (start_date.date(), str(ev['SUMMARY']), start_date, end_date)
def main(argv):
# TODO: specify filename?
cal_str = open('ical.ics').read()
cal = Calendar.from_string(cal_str)
vevents = (ev for ev in cal.walk() if ev.name == 'VEVENT')
event_datas = (data_for_vevent(ev) for ev in vevents)
ordered_event_data = sorted(event_datas, key=lambda d: d[0])
with open('giants_schedule.py', 'w') as outfile:
outfile.write('import datetime\n\n\n')
outfile.write('schedule = ')
outfile.write(pformat(tuple(ordered_event_data)))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
ac2be16f952a40710610e0368ce195b73b92b611 | core/management/commands/ticker/exchangerate.py | core/management/commands/ticker/exchangerate.py | import decimal
import logging
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except Exception as e:
logger.warning("Unhandled exception: %s" % e)
logger.warning("Ignoring and trying to re-fetch exchange rate...")
| import decimal
import logging
import sys
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except:
# Likely a problem with Yahoo's service; log a warning and retry
# Try to include the response text in the log data if it is available
try:
extra = {'response': csv}
except NameError:
extra = {}
logger.warning(
"Couldn't look up USD/NOK exchange rate; ignoring and re-fetching instantly...",
exc_info=sys.exc_info(),
extra=extra,
)
| Include more exception data on exchange rate failure | Include more exception data on exchange rate failure
| Python | unlicense | kvikshaug/btc.kvikshaug.no,kvikshaug/btc.kvikshaug.no,kvikshaug/btc.kvikshaug.no,kvikshaug/btc.kvikshaug.no | import decimal
import logging
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except Exception as e:
logger.warning("Unhandled exception: %s" % e)
logger.warning("Ignoring and trying to re-fetch exchange rate...")
Include more exception data on exchange rate failure | import decimal
import logging
import sys
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except:
# Likely a problem with Yahoo's service; log a warning and retry
# Try to include the response text in the log data if it is available
try:
extra = {'response': csv}
except NameError:
extra = {}
logger.warning(
"Couldn't look up USD/NOK exchange rate; ignoring and re-fetching instantly...",
exc_info=sys.exc_info(),
extra=extra,
)
| <commit_before>import decimal
import logging
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except Exception as e:
logger.warning("Unhandled exception: %s" % e)
logger.warning("Ignoring and trying to re-fetch exchange rate...")
<commit_msg>Include more exception data on exchange rate failure<commit_after> | import decimal
import logging
import sys
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except:
# Likely a problem with Yahoo's service; log a warning and retry
# Try to include the response text in the log data if it is available
try:
extra = {'response': csv}
except NameError:
extra = {}
logger.warning(
"Couldn't look up USD/NOK exchange rate; ignoring and re-fetching instantly...",
exc_info=sys.exc_info(),
extra=extra,
)
| import decimal
import logging
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except Exception as e:
logger.warning("Unhandled exception: %s" % e)
logger.warning("Ignoring and trying to re-fetch exchange rate...")
Include more exception data on exchange rate failureimport decimal
import logging
import sys
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except:
# Likely a problem with Yahoo's service; log a warning and retry
# Try to include the response text in the log data if it is available
try:
extra = {'response': csv}
except NameError:
extra = {}
logger.warning(
"Couldn't look up USD/NOK exchange rate; ignoring and re-fetching instantly...",
exc_info=sys.exc_info(),
extra=extra,
)
| <commit_before>import decimal
import logging
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except Exception as e:
logger.warning("Unhandled exception: %s" % e)
logger.warning("Ignoring and trying to re-fetch exchange rate...")
<commit_msg>Include more exception data on exchange rate failure<commit_after>import decimal
import logging
import sys
import threading
import requests
logger = logging.getLogger('btc.priceticker.exchangerate')
class ExchangeRate(threading.Thread):
YAHOO_FINANCE_URL = "https://download.finance.yahoo.com/d/quotes.csv"
YAHOO_FINANCE_PARAMS = {'e': '.csv', 'f': 'sl1d1t1', 's': 'USDNOK=X'}
SLEEP_TIME = 60 * 10
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.rate = None
self.stop_event = threading.Event()
self.start()
def get_rate(self):
return self.rate
def stop(self):
self.stop_event.set()
def run(self):
while not self.stop_event.is_set():
try:
logger.debug("Fetching exchange rates from Yahoo Finance...")
csv = requests.get(ExchangeRate.YAHOO_FINANCE_URL, params=ExchangeRate.YAHOO_FINANCE_PARAMS).text
name, rate, date, time = csv.split(',')
self.rate = decimal.Decimal(rate)
self.stop_event.wait(ExchangeRate.SLEEP_TIME)
except:
# Likely a problem with Yahoo's service; log a warning and retry
# Try to include the response text in the log data if it is available
try:
extra = {'response': csv}
except NameError:
extra = {}
logger.warning(
"Couldn't look up USD/NOK exchange rate; ignoring and re-fetching instantly...",
exc_info=sys.exc_info(),
extra=extra,
)
|
f832c047acf95e4a9f426eb2e3a174db025325a4 | test/runalltest.py | test/runalltest.py | import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
| import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.flush()
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
sys.stderr.flush()
sys.stderr.write("\nrunning %s" % test.__name__)
sys.stderr.flush()
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
| Print test names when running full testsuite | Print test names when running full testsuite | Python | bsd-2-clause | pressel/mpi4py,mpi4py/mpi4py,mpi4py/mpi4py,pressel/mpi4py,pressel/mpi4py,pressel/mpi4py,mpi4py/mpi4py | import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
Print test names when running full testsuite | import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.flush()
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
sys.stderr.flush()
sys.stderr.write("\nrunning %s" % test.__name__)
sys.stderr.flush()
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
| <commit_before>import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
<commit_msg>Print test names when running full testsuite<commit_after> | import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.flush()
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
sys.stderr.flush()
sys.stderr.write("\nrunning %s" % test.__name__)
sys.stderr.flush()
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
| import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
Print test names when running full testsuiteimport sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.flush()
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
sys.stderr.flush()
sys.stderr.write("\nrunning %s" % test.__name__)
sys.stderr.flush()
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
| <commit_before>import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
<commit_msg>Print test names when running full testsuite<commit_after>import sys, os
import unittest
try: # use the 'installed' mpi4py
import mpi4py
except ImportError: # or the no yet installed mpi4py
from distutils.util import get_platform
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
os.path.split(__file__)[0]
path = os.path.join(os.path.split(__file__)[0], os.path.pardir,
'build', 'lib' + plat_specifier)
path = os.path.normpath(path)
sys.path.insert(0, path)
import mpi4py
from mpi4py import MPI
sys.stderr.flush()
sys.stderr.write("mpi4py imported from '%s'\n" % mpi4py.__path__[0])
sys.stderr.flush()
# make sure we are using the Cython-based version
assert os.path.splitext(MPI.__file__)[1] not in ('.py', '.pyc', '.pyo')
testpath = os.path.split(__file__)[0]
sys.path.insert(0, testpath)
import mpiunittest
alltests = mpiunittest.find_tests(
exclude=[
]
)
def runtests(*args, **kargs):
for test in alltests:
sys.stderr.flush()
sys.stderr.write("\nrunning %s" % test.__name__)
sys.stderr.flush()
mpiunittest.main(test, *args, **kargs)
if __name__ == '__main__':
runtests()
|
2212d1b943987652f4a6a575e3e88dc3e174ce7c | eigen/3.2/conanfile.py | eigen/3.2/conanfile.py | from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
| from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
| Add --insecure option to hg clone to avoid self-assigned certificate issue | Add --insecure option to hg clone to avoid self-assigned certificate issue
| Python | bsd-2-clause | jslee02/conan-dart,jslee02/conan-dart,jslee02/conan-dart | from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
Add --insecure option to hg clone to avoid self-assigned certificate issue | from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
| <commit_before>from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
<commit_msg>Add --insecure option to hg clone to avoid self-assigned certificate issue<commit_after> | from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
| from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
Add --insecure option to hg clone to avoid self-assigned certificate issuefrom conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
| <commit_before>from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
<commit_msg>Add --insecure option to hg clone to avoid self-assigned certificate issue<commit_after>from conans import ConanFile
import os
class EigenConan(ConanFile):
name = "eigen"
version = "3.2"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=True"
exports = "eigen/*"
url="https://github.com/jslee02/conan-dart/tree/master/eigen/3.2"
def source(self):
self.run('hg clone --insecure https://bitbucket.org/eigen/eigen -u 3.2.7')
self.run('cd eigen')
def package(self):
self.copy("*", dst="include/Eigen", src="eigen/Eigen")
self.copy("*", dst="include/unsupported", src="eigen/unsupported")
|
33915344c52ba7ea95cbe56668f55b1ba520af10 | fortuitus/fcore/models.py | fortuitus/fcore/models.py | from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
| from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
| Use receiver decorator instead of `signal.connect` | Use receiver decorator instead of `signal.connect`
I've contributed to this decorator upstream! It should be used. :-)
| Python | mit | elegion/djangodash2012,elegion/djangodash2012 | from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
Use receiver decorator instead of `signal.connect`
I've contributed to this decorator upstream! It should be used. :-) | from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
| <commit_before>from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
<commit_msg>Use receiver decorator instead of `signal.connect`
I've contributed to this decorator upstream! It should be used. :-)<commit_after> | from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
| from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
Use receiver decorator instead of `signal.connect`
I've contributed to this decorator upstream! It should be used. :-)from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
| <commit_before>from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
<commit_msg>Use receiver decorator instead of `signal.connect`
I've contributed to this decorator upstream! It should be used. :-)<commit_after>from autoslug.fields import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
class Company(models.Model):
""" Organization. """
slug = AutoSlugField(populate_from='name', unique=True)
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class FortuitusProfile(models.Model):
""" User profile. """
user = models.OneToOneField(User)
# TODO: support multiple organizations.
company = models.ForeignKey(Company, null=True, blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
""" User post_save signal handler, creates user profile instance. """
if created:
FortuitusProfile.objects.create(user=instance)
|
bd1346a318f8f8d553a3fbf0a353ef6d68102566 | twitter/twitter_globals.py | twitter/twitter_globals.py | '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup',
# Semantic Methods
'filter',
# OAuth Methods
'token',
]
| '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session', 'settings',
'update_profile_banner', 'remove_profile_banner',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy', 'destroy_all',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup', 'report_spam',
# Geo Methods
'place',
# Streaming Methods
'filter', 'user', 'site',
# OAuth Methods
'token', 'access_token',
'request_token', 'invalidate_token',
]
| Use POST for all methods requiring it in specs | Use POST for all methods requiring it in specs
Added all missing methods from https://dev.twitter.com/docs/api/1.1
Also included some of the streaming methods which work with both GET
and POST but accept arguments like "track" which can quickly require
POST.
(Closes #187 #145 #188)
| Python | mit | adonoho/twitter,Adai0808/twitter,jessamynsmith/twitter,hugovk/twitter,sixohsix/twitter,tytek2012/twitter,miragshin/twitter | '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup',
# Semantic Methods
'filter',
# OAuth Methods
'token',
]
Use POST for all methods requiring it in specs
Added all missing methods from https://dev.twitter.com/docs/api/1.1
Also included some of the streaming methods which work with both GET
and POST but accept arguments like "track" which can quickly require
POST.
(Closes #187 #145 #188) | '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session', 'settings',
'update_profile_banner', 'remove_profile_banner',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy', 'destroy_all',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup', 'report_spam',
# Geo Methods
'place',
# Streaming Methods
'filter', 'user', 'site',
# OAuth Methods
'token', 'access_token',
'request_token', 'invalidate_token',
]
| <commit_before>'''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup',
# Semantic Methods
'filter',
# OAuth Methods
'token',
]
<commit_msg>Use POST for all methods requiring it in specs
Added all missing methods from https://dev.twitter.com/docs/api/1.1
Also included some of the streaming methods which work with both GET
and POST but accept arguments like "track" which can quickly require
POST.
(Closes #187 #145 #188)<commit_after> | '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session', 'settings',
'update_profile_banner', 'remove_profile_banner',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy', 'destroy_all',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup', 'report_spam',
# Geo Methods
'place',
# Streaming Methods
'filter', 'user', 'site',
# OAuth Methods
'token', 'access_token',
'request_token', 'invalidate_token',
]
| '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup',
# Semantic Methods
'filter',
# OAuth Methods
'token',
]
Use POST for all methods requiring it in specs
Added all missing methods from https://dev.twitter.com/docs/api/1.1
Also included some of the streaming methods which work with both GET
and POST but accept arguments like "track" which can quickly require
POST.
(Closes #187 #145 #188)'''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session', 'settings',
'update_profile_banner', 'remove_profile_banner',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy', 'destroy_all',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup', 'report_spam',
# Geo Methods
'place',
# Streaming Methods
'filter', 'user', 'site',
# OAuth Methods
'token', 'access_token',
'request_token', 'invalidate_token',
]
| <commit_before>'''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup',
# Semantic Methods
'filter',
# OAuth Methods
'token',
]
<commit_msg>Use POST for all methods requiring it in specs
Added all missing methods from https://dev.twitter.com/docs/api/1.1
Also included some of the streaming methods which work with both GET
and POST but accept arguments like "track" which can quickly require
POST.
(Closes #187 #145 #188)<commit_after>'''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session', 'settings',
'update_profile_banner', 'remove_profile_banner',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy', 'destroy_all',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup', 'report_spam',
# Geo Methods
'place',
# Streaming Methods
'filter', 'user', 'site',
# OAuth Methods
'token', 'access_token',
'request_token', 'invalidate_token',
]
|
2ae93662f9f978dfae98096701b8e2e2a135f5a5 | rejected/__init__.py | rejected/__init__.py | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
| """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
logging.getLogger('rejected.consumer').addHandler(NullHandler())
| Add an additional null handler | Add an additional null handler
| Python | bsd-3-clause | gmr/rejected,gmr/rejected | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
Add an additional null handler | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
logging.getLogger('rejected.consumer').addHandler(NullHandler())
| <commit_before>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
<commit_msg>Add an additional null handler<commit_after> | """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
logging.getLogger('rejected.consumer').addHandler(NullHandler())
| """
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
Add an additional null handler"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
logging.getLogger('rejected.consumer').addHandler(NullHandler())
| <commit_before>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
<commit_msg>Add an additional null handler<commit_after>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.4.2"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('rejected').addHandler(NullHandler())
logging.getLogger('rejected.consumer').addHandler(NullHandler())
|
b75153ad49280ce793a995fca4a34d0688d63cb4 | tests/unit/checkout/mixins_tests.py | tests/unit/checkout/mixins_tests.py | import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
| import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
| Add tests for CheckoutSessionMixin.check_basket_is_valid method. | Add tests for CheckoutSessionMixin.check_basket_is_valid method.
| Python | bsd-3-clause | django-oscar/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar | import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
Add tests for CheckoutSessionMixin.check_basket_is_valid method. | import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
    """Tests for OrderPlacementMixin.create_shipping_address."""

    def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
        # When no shipping address is supplied, no address object should
        # be created or persisted.
        address = OrderPlacementMixin().create_shipping_address(
            user=mock.Mock(), shipping_address=None)
        self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
    """Tests for CheckoutSessionMixin.check_basket_is_valid."""

    def setUp(self):
        self.request = RequestFactory().get('/')
        self.product = factories.create_product(num_in_stock=10)
        self.stock_record = self.product.stockrecords.first()

    def add_product_to_basket(self, product, quantity=1):
        """Add ``product`` to the request's basket and sanity-check the line."""
        self.request.basket.add_product(product, quantity=quantity)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(len(self.request.basket.all_lines()), 1)
        self.assertEqual(self.request.basket.all_lines()[0].product, product)

    def test_check_basket_is_valid_no_stock_available(self):
        """Basket check fails once the product's stock is fully allocated."""
        self.add_product_to_basket(self.product)
        CheckoutSessionMixin().check_basket_is_valid(self.request)
        # Allocate all 10 units so nothing remains available.
        self.stock_record.allocate(10)
        self.stock_record.save()
        with self.assertRaises(FailedPreCondition):
            CheckoutSessionMixin().check_basket_is_valid(self.request)

    def test_check_basket_is_valid_stock_exceeded(self):
        """Basket check fails when the requested quantity exceeds stock."""
        self.add_product_to_basket(self.product)
        CheckoutSessionMixin().check_basket_is_valid(self.request)
        # Push the basket quantity past the 10 units in stock.
        self.request.basket.add_product(self.product, quantity=11)
        with self.assertRaises(FailedPreCondition):
            CheckoutSessionMixin().check_basket_is_valid(self.request)
| <commit_before>import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
<commit_msg>Add tests for CheckoutSessionMixin.check_basket_is_valid method.<commit_after> | import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
| import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
Add tests for CheckoutSessionMixin.check_basket_is_valid method.import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
| <commit_before>import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
<commit_msg>Add tests for CheckoutSessionMixin.check_basket_is_valid method.<commit_after>import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
|
e066eae6bb0f9d555a53f9ee2901c77ffebd3647 | tracer/cachemanager/cachemanager.py | tracer/cachemanager/cachemanager.py | import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
    """Base class for tracer state-cache backends.

    Subclasses implement cacher() / cache_lookup() to persist and restore
    a pickled tracer state so later runs can skip re-execution.
    """

    def __init__(self):
        self.tracer = None  # attached later via set_tracer()

    def set_tracer(self, tracer):
        """Attach the tracer whose state this manager caches."""
        self.tracer = tracer

    def cacher(self, simstate):
        raise NotImplementedError("subclasses must implement this method")

    def cache_lookup(self):
        raise NotImplementedError("subclasses must implement this method")

    def _prepare_cache_data(self, simstate):
        """Pickle the tracer's previous state for caching.

        Returns the pickled bytes, or None if pickling failed.  As side
        effects, unhooks the receive cache hook and re-applies the
        preconstraints to ``simstate``.
        """
        # The tracer may not have stepped yet, in which case there is no
        # previous state to cache; dereferencing it blindly would raise
        # AttributeError.
        state = self.tracer.previous.state if self.tracer.previous is not None else None

        ds = None
        try:
            ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
        except RuntimeError as e: # maximum recursion depth can be reached here
            l.error("unable to cache state, '%s' during pickling", e.message)

        # unhook receive
        receive.cache_hook = None

        # add preconstraints to tracer
        self.tracer._preconstrain_state(simstate)

        return ds
| import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
    """Base class for tracer state-cache backends.

    Subclasses implement cacher() / cache_lookup() to persist and restore
    a pickled tracer state so later runs can skip re-execution.
    """

    def __init__(self):
        self.tracer = None  # attached later via set_tracer()

    def set_tracer(self, tracer):
        """Attach the tracer whose state this manager caches."""
        self.tracer = tracer

    def cacher(self, simstate):
        raise NotImplementedError("subclasses must implement this method")

    def cache_lookup(self):
        raise NotImplementedError("subclasses must implement this method")

    def _prepare_cache_data(self, simstate):
        """Pickle the tracer's previous state for caching.

        Returns the pickled bytes, or None if pickling failed.  As side
        effects, unhooks the receive cache hook and re-applies the
        preconstraints to ``simstate``.
        """
        # PEP 8: compare to None with "is not", never "!=".  The tracer may
        # not have stepped yet, so there may be no previous state to cache.
        if self.tracer.previous is not None:
            state = self.tracer.previous.state
        else:
            state = None

        ds = None
        try:
            ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
        except RuntimeError as e: # maximum recursion depth can be reached here
            l.error("unable to cache state, '%s' during pickling", e.message)

        # unhook receive
        receive.cache_hook = None

        # add preconstraints to tracer
        self.tracer._preconstrain_state(simstate)

        return ds
| Fix a bug in the cache manager | Fix a bug in the cache manager
It is possible that the previous state is None
| Python | bsd-2-clause | schieb/angr,schieb/angr,angr/angr,angr/angr,tyb0807/angr,iamahuman/angr,f-prettyland/angr,tyb0807/angr,iamahuman/angr,iamahuman/angr,tyb0807/angr,angr/angr,schieb/angr,angr/tracer,f-prettyland/angr,f-prettyland/angr | import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
state = self.tracer.previous.state
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
Fix a bug in the cache manager
It is possible that the previous state is None | import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
if self.tracer.previous != None:
state = self.tracer.previous.state
else:
state = None
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
| <commit_before>import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
state = self.tracer.previous.state
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
<commit_msg>Fix a bug in the cache manager
It is possible that the previous state is None<commit_after> | import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
if self.tracer.previous != None:
state = self.tracer.previous.state
else:
state = None
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
| import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
state = self.tracer.previous.state
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
Fix a bug in the cache manager
It is possible that the previous state is Noneimport pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
if self.tracer.previous != None:
state = self.tracer.previous.state
else:
state = None
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
| <commit_before>import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
state = self.tracer.previous.state
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
<commit_msg>Fix a bug in the cache manager
It is possible that the previous state is None<commit_after>import pickle
import claripy
import logging
from ..simprocedures import receive
l = logging.getLogger("tracer.cachemanager.CacheManager")
class CacheManager(object):
def __init__(self):
self.tracer = None
def set_tracer(self, tracer):
self.tracer = tracer
def cacher(self, simstate):
raise NotImplementedError("subclasses must implement this method")
def cache_lookup(self):
raise NotImplementedError("subclasses must implement this method")
def _prepare_cache_data(self, simstate):
if self.tracer.previous != None:
state = self.tracer.previous.state
else:
state = None
ds = None
try:
ds = pickle.dumps((self.tracer.bb_cnt - 1, self.tracer.cgc_flag_bytes, state, claripy.ast.base.var_counter), pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("unable to cache state, '%s' during pickling", e.message)
# unhook receive
receive.cache_hook = None
# add preconstraints to tracer
self.tracer._preconstrain_state(simstate)
return ds
|
01983a9c8fab1556aac524e7b3000461ff7c0b5d | txircd/modules/core/channellevel.py | txircd/modules/core/channellevel.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
    """Core module answering channel status level checks.

    Provides the "checkchannellevel" and "checkexemptchanops" actions used
    by other modules to decide whether a user's channel status rank is high
    enough for an operation or an exemption.
    """
    # IPlugin must be declared as well; without it twisted.plugin does not
    # discover this class as a plugin at all.
    implements(IPlugin, IModuleData)

    name = "ChannelLevel"
    core = True

    def actions(self):
        """Register both rank-check hooks at priority 1."""
        return [ ("checkchannellevel", 1, self.levelCheck),
            ("checkexemptchanops", 1, self.exemptCheck) ]

    def minLevelFromConfig(self, configKey, checkType, defaultLevel):
        """Resolve the configured minimum rank for checkType under configKey.

        The configured value may be an integer rank or a status identifier;
        an identifier is mapped to its rank via ircd.channelStatuses.
        Returns False when the configured status does not exist.
        """
        configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
        try:
            minLevel = int(configLevel)
        except ValueError:
            if configLevel not in self.ircd.channelStatuses:
                return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
            minLevel = self.ircd.channelStatuses[configLevel][1]
        return minLevel

    def levelCheck(self, levelType, channel, user):
        """Return True if user's rank in channel meets the configured minimum (default 100)."""
        minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
        return channel.userRank(user) >= minLevel

    def exemptCheck(self, exemptType, channel, user):
        """Return True if user's rank in channel grants an exemption of exemptType."""
        # A default of 0 means exemptions are disabled unless configured.
        minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
        if not minLevel:
            return False # No minimum level == no exemptions
        return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
    """Core module answering channel status level checks.

    Provides the "checkchannellevel" and "checkexemptchanops" actions used
    by other modules to decide whether a user's channel status rank is high
    enough for an operation or an exemption.
    """
    implements(IPlugin, IModuleData)

    name = "ChannelLevel"
    core = True

    def actions(self):
        # Register both rank-check hooks at priority 1.
        return [ ("checkchannellevel", 1, self.levelCheck),
            ("checkexemptchanops", 1, self.exemptCheck) ]

    def minLevelFromConfig(self, configKey, checkType, defaultLevel):
        """Resolve the configured minimum rank for checkType under configKey.

        The configured value may be an integer rank or a status identifier;
        an identifier is mapped to its rank via ircd.channelStatuses.
        Returns False when the configured status does not exist.
        """
        configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
        try:
            minLevel = int(configLevel)
        except ValueError:
            if configLevel not in self.ircd.channelStatuses:
                return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
            minLevel = self.ircd.channelStatuses[configLevel][1]
        return minLevel

    def levelCheck(self, levelType, channel, user):
        # Minimum rank defaults to 100 unless overridden in configuration.
        minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
        return channel.userRank(user) >= minLevel

    def exemptCheck(self, exemptType, channel, user):
        # A default of 0 means exemptions are disabled unless configured.
        minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
        if not minLevel:
            return False # No minimum level == no exemptions
        return channel.userRank(user) >= minLevel

chanLevel = ChannelLevel()
| Python | bsd-3-clause | Heufneutje/txircd,ElementalAlchemist/txircd | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel()Make ChannelLevel actually a valid module | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel() | <commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel()<commit_msg>Make ChannelLevel actually a valid module<commit_after> | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel()Make ChannelLevel actually a valid modulefrom twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel() | <commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel()<commit_msg>Make ChannelLevel actually a valid module<commit_after>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ChannelLevel(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelLevel"
core = True
def actions(self):
return [ ("checkchannellevel", 1, self.levelCheck),
("checkexemptchanops", 1, self.exemptCheck) ]
def minLevelFromConfig(self, configKey, checkType, defaultLevel):
configLevel = self.ircd.config.get(configKey, {}).get(checkType, defaultLevel)
try:
minLevel = int(configLevel)
except ValueError:
if configLevel not in self.ircd.channelStatuses:
return False # If the status doesn't exist, then, to be safe, we must assume NOBODY is above the line.
minLevel = self.ircd.channelStatuses[configLevel][1]
return minLevel
def levelCheck(self, levelType, channel, user):
minLevel = self.minLevelFromConfig("channel_minimum_level", levelType, 100)
return channel.userRank(user) >= minLevel
def exemptCheck(self, exemptType, channel, user):
minLevel = self.minLevelFromConfig("channel_exempt_level", exemptType, 0)
if not minLevel:
return False # No minimum level == no exemptions
return channel.userRank(user) >= minLevel
chanLevel = ChannelLevel() |
6bf971e4248d480594bf10b8f446bf30a5b16072 | scripts/cuba_enum_generator.py | scripts/cuba_enum_generator.py | from scripts.old_single_meta_class_generator import CUBA_DATA_CONTAINER_EXCLUDE
class CUBAEnumGenerator(object):
    """Generator class for cuba.py enumeration."""

    def generate(self, cuba_dict, simphony_metadata_dict, output):
        """Write the generated CUBA Enum module to ``output``.

        ``cuba_dict`` and ``simphony_metadata_dict`` are the dictionaries
        extracted from the cuba and simphony_metadata YAML files; keys
        listed in CUBA_DATA_CONTAINER_EXCLUDE are omitted.
        """
        template = ' {} = "{}"\n'
        header = [
            '# code auto-generated by the\n',
            '# simphony-metadata/scripts/generate.py script.\n',
            '# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
            'from enum import Enum, unique\n',
            '\n',
            '\n',
            '@unique\n',
            'class CUBA(Enum):\n'
        ]
        keywords = set(cuba_dict['CUBA_KEYS'])
        keywords.update(simphony_metadata_dict['CUDS_KEYS'])
        entries = [
            template.format(keyword, keyword)
            for keyword in sorted(keywords)
            if keyword not in CUBA_DATA_CONTAINER_EXCLUDE
        ]
        output.writelines(header + entries)
class CUBAEnumGenerator(object):
    """Generator class for cuba.py enumeration."""

    def generate(self, cuba_dict, simphony_metadata_dict, output):
        """Write the generated CUBA Enum module to ``output``.

        ``cuba_dict`` and ``simphony_metadata_dict`` are the dictionaries
        extracted from the cuba and simphony_metadata YAML files; each
        enum value carries the CUBA prefix.
        """
        template = ' {} = "{}"\n'
        header = [
            '# code auto-generated by the\n',
            '# simphony-metadata/scripts/generate.py script.\n',
            '# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
            'from enum import Enum, unique\n',
            '\n',
            '\n',
            '@unique\n',
            'class CUBA(Enum):\n'
        ]
        keywords = set(cuba_dict['CUBA_KEYS'])
        keywords.update(simphony_metadata_dict['CUDS_KEYS'])
        entries = [
            template.format(keyword, utils.with_cuba_prefix(keyword))
            for keyword in sorted(keywords)
        ]
        output.writelines(header + entries)
| Python | bsd-2-clause | simphony/simphony-common | from scripts.old_single_meta_class_generator import CUBA_DATA_CONTAINER_EXCLUDE
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
if keyword in CUBA_DATA_CONTAINER_EXCLUDE:
continue
lines.append(template.format(keyword, keyword))
output.writelines(lines)
Put prefix to enum values. Removed old hack. | from scripts import utils
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
lines.append(template.format(keyword,
utils.with_cuba_prefix(keyword)))
output.writelines(lines)
| <commit_before>from scripts.old_single_meta_class_generator import CUBA_DATA_CONTAINER_EXCLUDE
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
if keyword in CUBA_DATA_CONTAINER_EXCLUDE:
continue
lines.append(template.format(keyword, keyword))
output.writelines(lines)
<commit_msg>Put prefix to enum values. Removed old hack.<commit_after> | from scripts import utils
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
lines.append(template.format(keyword,
utils.with_cuba_prefix(keyword)))
output.writelines(lines)
| from scripts.old_single_meta_class_generator import CUBA_DATA_CONTAINER_EXCLUDE
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
if keyword in CUBA_DATA_CONTAINER_EXCLUDE:
continue
lines.append(template.format(keyword, keyword))
output.writelines(lines)
Put prefix to enum values. Removed old hack.from scripts import utils
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
lines.append(template.format(keyword,
utils.with_cuba_prefix(keyword)))
output.writelines(lines)
| <commit_before>from scripts.old_single_meta_class_generator import CUBA_DATA_CONTAINER_EXCLUDE
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
if keyword in CUBA_DATA_CONTAINER_EXCLUDE:
continue
lines.append(template.format(keyword, keyword))
output.writelines(lines)
<commit_msg>Put prefix to enum values. Removed old hack.<commit_after>from scripts import utils
class CUBAEnumGenerator(object):
"""Generator class for cuba.py enumeration.
"""
def generate(self, cuba_dict, simphony_metadata_dict, output):
"""Generates the cuba file from the yaml-extracted dictionary
of cuba and simphony_metadata files. Writes the generated code
in the file object output
"""
lines = [
'# code auto-generated by the\n',
'# simphony-metadata/scripts/generate.py script.\n',
'# cuba.yml VERSION: {}\n'.format(cuba_dict['VERSION']),
'from enum import Enum, unique\n',
'\n',
'\n',
'@unique\n',
'class CUBA(Enum):\n'
]
template = ' {} = "{}"\n'
all_keys = set(
cuba_dict['CUBA_KEYS']) | set(simphony_metadata_dict['CUDS_KEYS'])
for keyword in sorted(list(all_keys)):
lines.append(template.format(keyword,
utils.with_cuba_prefix(keyword)))
output.writelines(lines)
|
40b4e156a72dd09d752b6ba3adeec7e28ca127a8 | crawler/collector.py | crawler/collector.py | #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
import json
def collect(crawl_id, result_queue, log):
with dataset.connect(DATABASE_URL) as db:
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
page_url, data = None, None
if result:
[(page_url, data)] = result.items()
data = json.dumps(data)
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
data=data
))
log("Collecting finished.")
| #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
def collect(crawl_id, result_queue, log):
db = dataset.connect(DATABASE_URL)
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
if not result:
with db:
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url
))
continue
for page_url, page_data in result.items():
for domain, ddata in page_data['domains'].items():
for script_url, sdata in ddata['scripts'].items():
with db:
result_id = db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
domain=domain,
script_url=script_url,
canvas=sdata['canvas']['fingerprinting'],
font_enum=sdata['fontEnumeration'],
navigator_enum=sdata['navigatorEnumeration']
))
# property access counts get saved in `property_count`
rows = []
for property, count in sdata['counts'].items():
rows.append(dict(
result_id=result_id,
property=property,
count=count
))
with db:
db['property_count'].insert_many(rows)
log("Collecting finished.")
| Convert JSON blob to relational db structure. | Convert JSON blob to relational db structure.
| Python | mpl-2.0 | ghostwords/chameleon-crawler,ghostwords/chameleon-crawler,ghostwords/chameleon-crawler | #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
import json
def collect(crawl_id, result_queue, log):
with dataset.connect(DATABASE_URL) as db:
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
page_url, data = None, None
if result:
[(page_url, data)] = result.items()
data = json.dumps(data)
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
data=data
))
log("Collecting finished.")
Convert JSON blob to relational db structure. | #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
def collect(crawl_id, result_queue, log):
db = dataset.connect(DATABASE_URL)
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
if not result:
with db:
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url
))
continue
for page_url, page_data in result.items():
for domain, ddata in page_data['domains'].items():
for script_url, sdata in ddata['scripts'].items():
with db:
result_id = db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
domain=domain,
script_url=script_url,
canvas=sdata['canvas']['fingerprinting'],
font_enum=sdata['fontEnumeration'],
navigator_enum=sdata['navigatorEnumeration']
))
# property access counts get saved in `property_count`
rows = []
for property, count in sdata['counts'].items():
rows.append(dict(
result_id=result_id,
property=property,
count=count
))
with db:
db['property_count'].insert_many(rows)
log("Collecting finished.")
| <commit_before>#!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
import json
def collect(crawl_id, result_queue, log):
with dataset.connect(DATABASE_URL) as db:
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
page_url, data = None, None
if result:
[(page_url, data)] = result.items()
data = json.dumps(data)
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
data=data
))
log("Collecting finished.")
<commit_msg>Convert JSON blob to relational db structure.<commit_after> | #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
def collect(crawl_id, result_queue, log):
db = dataset.connect(DATABASE_URL)
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
if not result:
with db:
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url
))
continue
for page_url, page_data in result.items():
for domain, ddata in page_data['domains'].items():
for script_url, sdata in ddata['scripts'].items():
with db:
result_id = db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
domain=domain,
script_url=script_url,
canvas=sdata['canvas']['fingerprinting'],
font_enum=sdata['fontEnumeration'],
navigator_enum=sdata['navigatorEnumeration']
))
# property access counts get saved in `property_count`
rows = []
for property, count in sdata['counts'].items():
rows.append(dict(
result_id=result_id,
property=property,
count=count
))
with db:
db['property_count'].insert_many(rows)
log("Collecting finished.")
| #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
import json
def collect(crawl_id, result_queue, log):
with dataset.connect(DATABASE_URL) as db:
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
page_url, data = None, None
if result:
[(page_url, data)] = result.items()
data = json.dumps(data)
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
data=data
))
log("Collecting finished.")
Convert JSON blob to relational db structure.#!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
def collect(crawl_id, result_queue, log):
db = dataset.connect(DATABASE_URL)
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
if not result:
with db:
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url
))
continue
for page_url, page_data in result.items():
for domain, ddata in page_data['domains'].items():
for script_url, sdata in ddata['scripts'].items():
with db:
result_id = db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
domain=domain,
script_url=script_url,
canvas=sdata['canvas']['fingerprinting'],
font_enum=sdata['fontEnumeration'],
navigator_enum=sdata['navigatorEnumeration']
))
# property access counts get saved in `property_count`
rows = []
for property, count in sdata['counts'].items():
rows.append(dict(
result_id=result_id,
property=property,
count=count
))
with db:
db['property_count'].insert_many(rows)
log("Collecting finished.")
| <commit_before>#!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
import json
def collect(crawl_id, result_queue, log):
with dataset.connect(DATABASE_URL) as db:
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
page_url, data = None, None
if result:
[(page_url, data)] = result.items()
data = json.dumps(data)
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
data=data
))
log("Collecting finished.")
<commit_msg>Convert JSON blob to relational db structure.<commit_after>#!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from time import sleep
from .utils import DATABASE_URL
import dataset
def collect(crawl_id, result_queue, log):
db = dataset.connect(DATABASE_URL)
while True:
if result_queue.empty():
sleep(0.01)
continue
result = result_queue.get()
if result is None:
break
crawl_url, result = result
if not result:
with db:
db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url
))
continue
for page_url, page_data in result.items():
for domain, ddata in page_data['domains'].items():
for script_url, sdata in ddata['scripts'].items():
with db:
result_id = db['result'].insert(dict(
crawl_id=crawl_id,
crawl_url=crawl_url,
page_url=page_url,
domain=domain,
script_url=script_url,
canvas=sdata['canvas']['fingerprinting'],
font_enum=sdata['fontEnumeration'],
navigator_enum=sdata['navigatorEnumeration']
))
# property access counts get saved in `property_count`
rows = []
for property, count in sdata['counts'].items():
rows.append(dict(
result_id=result_id,
property=property,
count=count
))
with db:
db['property_count'].insert_many(rows)
log("Collecting finished.")
|
fc9cd61f97924a1e3daf053319e9b49a73b58c80 | dploy/__init__.py | dploy/__init__.py | """
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest):
"""
sub command stow
"""
main.Stow(sources, dest) # pylint: disable=protected-access
def unstow(sources, dest):
"""
sub command unstow
"""
main.UnStow(sources, dest) # pylint: disable=protected-access
def link(source, dest):
"""
sub command link
"""
main.Link(source, dest) # pylint: disable=protected-access
| """
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command stow
"""
main.Stow(sources, dest, is_silent, is_dry_run)
def unstow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command unstow
"""
main.UnStow(sources, dest, is_silent, is_dry_run)
def link(source, dest, is_silent=True, is_dry_run=False):
"""
sub command link
"""
main.Link(source, dest, is_silent, is_dry_run)
| Add is_silent & is_dry_run arguments to module API | Add is_silent & is_dry_run arguments to module API
This way all the features of the command line commands is also in the module API
| Python | mit | arecarn/dploy | """
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest):
"""
sub command stow
"""
main.Stow(sources, dest) # pylint: disable=protected-access
def unstow(sources, dest):
"""
sub command unstow
"""
main.UnStow(sources, dest) # pylint: disable=protected-access
def link(source, dest):
"""
sub command link
"""
main.Link(source, dest) # pylint: disable=protected-access
Add is_silent & is_dry_run arguments to module API
This way all the features of the command line commands is also in the module API | """
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command stow
"""
main.Stow(sources, dest, is_silent, is_dry_run)
def unstow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command unstow
"""
main.UnStow(sources, dest, is_silent, is_dry_run)
def link(source, dest, is_silent=True, is_dry_run=False):
"""
sub command link
"""
main.Link(source, dest, is_silent, is_dry_run)
| <commit_before>"""
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest):
"""
sub command stow
"""
main.Stow(sources, dest) # pylint: disable=protected-access
def unstow(sources, dest):
"""
sub command unstow
"""
main.UnStow(sources, dest) # pylint: disable=protected-access
def link(source, dest):
"""
sub command link
"""
main.Link(source, dest) # pylint: disable=protected-access
<commit_msg>Add is_silent & is_dry_run arguments to module API
This way all the features of the command line commands is also in the module API<commit_after> | """
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command stow
"""
main.Stow(sources, dest, is_silent, is_dry_run)
def unstow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command unstow
"""
main.UnStow(sources, dest, is_silent, is_dry_run)
def link(source, dest, is_silent=True, is_dry_run=False):
"""
sub command link
"""
main.Link(source, dest, is_silent, is_dry_run)
| """
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest):
"""
sub command stow
"""
main.Stow(sources, dest) # pylint: disable=protected-access
def unstow(sources, dest):
"""
sub command unstow
"""
main.UnStow(sources, dest) # pylint: disable=protected-access
def link(source, dest):
"""
sub command link
"""
main.Link(source, dest) # pylint: disable=protected-access
Add is_silent & is_dry_run arguments to module API
This way all the features of the command line commands is also in the module API"""
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command stow
"""
main.Stow(sources, dest, is_silent, is_dry_run)
def unstow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command unstow
"""
main.UnStow(sources, dest, is_silent, is_dry_run)
def link(source, dest, is_silent=True, is_dry_run=False):
"""
sub command link
"""
main.Link(source, dest, is_silent, is_dry_run)
| <commit_before>"""
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest):
"""
sub command stow
"""
main.Stow(sources, dest) # pylint: disable=protected-access
def unstow(sources, dest):
"""
sub command unstow
"""
main.UnStow(sources, dest) # pylint: disable=protected-access
def link(source, dest):
"""
sub command link
"""
main.Link(source, dest) # pylint: disable=protected-access
<commit_msg>Add is_silent & is_dry_run arguments to module API
This way all the features of the command line commands is also in the module API<commit_after>"""
dploy script is an attempt at creating a clone of GNU stow that will work on
Windows as well as *nix
"""
import sys
assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater"
import dploy.main as main
def stow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command stow
"""
main.Stow(sources, dest, is_silent, is_dry_run)
def unstow(sources, dest, is_silent=True, is_dry_run=False):
"""
sub command unstow
"""
main.UnStow(sources, dest, is_silent, is_dry_run)
def link(source, dest, is_silent=True, is_dry_run=False):
"""
sub command link
"""
main.Link(source, dest, is_silent, is_dry_run)
|
a5fa87d1dac36ae8a1e0939aaf7835aa39d5c153 | jsonapi.py | jsonapi.py | from flask import Blueprint
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
return "Create JSON Blueprint!"
| from flask import Blueprint, jsonify, request
import MySQLdb, dbc
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
# Get URL and password from POST Request
URL = request.form.get('url')
password = request.form.get('password')
custom = request.form.get('custom')
# Check if custom alias is set, if not, generate one
if custom == '':
alias = gen_rand_alias(10)
else:
alias = custom
# If password is incorrect, Rick Roll
if password not in dbc.passwords or password is None:
return jsonify(error="Password was not recognized.")
# Create database connection
db = MySQLdb.connect(host=dbc.server, user=dbc.user, passwd=dbc.passwd, db=dbc.db)
cursor = db.cursor()
# Insert Redirect URL and Alias into database
cursor.execute("INSERT INTO " + dbc.urltbl + " (url, alias) VALUES (\'" + URL + "\', \'" + alias + "\');")
# Commit to database and close connections
db.commit()
cursor.close()
db.close()
return jsonify(url="http://frst.xyz/" + alias)
| Add jsonify to api returns | Add jsonify to api returns | Python | apache-2.0 | kylefrost/frst.xyz,kylefrost/frst.xyz | from flask import Blueprint
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
return "Create JSON Blueprint!"
Add jsonify to api returns | from flask import Blueprint, jsonify, request
import MySQLdb, dbc
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
# Get URL and password from POST Request
URL = request.form.get('url')
password = request.form.get('password')
custom = request.form.get('custom')
# Check if custom alias is set, if not, generate one
if custom == '':
alias = gen_rand_alias(10)
else:
alias = custom
# If password is incorrect, Rick Roll
if password not in dbc.passwords or password is None:
return jsonify(error="Password was not recognized.")
# Create database connection
db = MySQLdb.connect(host=dbc.server, user=dbc.user, passwd=dbc.passwd, db=dbc.db)
cursor = db.cursor()
# Insert Redirect URL and Alias into database
cursor.execute("INSERT INTO " + dbc.urltbl + " (url, alias) VALUES (\'" + URL + "\', \'" + alias + "\');")
# Commit to database and close connections
db.commit()
cursor.close()
db.close()
return jsonify(url="http://frst.xyz/" + alias)
| <commit_before>from flask import Blueprint
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
return "Create JSON Blueprint!"
<commit_msg>Add jsonify to api returns<commit_after> | from flask import Blueprint, jsonify, request
import MySQLdb, dbc
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
# Get URL and password from POST Request
URL = request.form.get('url')
password = request.form.get('password')
custom = request.form.get('custom')
# Check if custom alias is set, if not, generate one
if custom == '':
alias = gen_rand_alias(10)
else:
alias = custom
# If password is incorrect, Rick Roll
if password not in dbc.passwords or password is None:
return jsonify(error="Password was not recognized.")
# Create database connection
db = MySQLdb.connect(host=dbc.server, user=dbc.user, passwd=dbc.passwd, db=dbc.db)
cursor = db.cursor()
# Insert Redirect URL and Alias into database
cursor.execute("INSERT INTO " + dbc.urltbl + " (url, alias) VALUES (\'" + URL + "\', \'" + alias + "\');")
# Commit to database and close connections
db.commit()
cursor.close()
db.close()
return jsonify(url="http://frst.xyz/" + alias)
| from flask import Blueprint
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
return "Create JSON Blueprint!"
Add jsonify to api returnsfrom flask import Blueprint, jsonify, request
import MySQLdb, dbc
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
# Get URL and password from POST Request
URL = request.form.get('url')
password = request.form.get('password')
custom = request.form.get('custom')
# Check if custom alias is set, if not, generate one
if custom == '':
alias = gen_rand_alias(10)
else:
alias = custom
# If password is incorrect, Rick Roll
if password not in dbc.passwords or password is None:
return jsonify(error="Password was not recognized.")
# Create database connection
db = MySQLdb.connect(host=dbc.server, user=dbc.user, passwd=dbc.passwd, db=dbc.db)
cursor = db.cursor()
# Insert Redirect URL and Alias into database
cursor.execute("INSERT INTO " + dbc.urltbl + " (url, alias) VALUES (\'" + URL + "\', \'" + alias + "\');")
# Commit to database and close connections
db.commit()
cursor.close()
db.close()
return jsonify(url="http://frst.xyz/" + alias)
| <commit_before>from flask import Blueprint
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
return "Create JSON Blueprint!"
<commit_msg>Add jsonify to api returns<commit_after>from flask import Blueprint, jsonify, request
import MySQLdb, dbc
# Set up api Blueprint
api = Blueprint('api', __name__)
# API Post Route
@api.route('/create', methods=['GET', 'POST'])
def api_create():
# Get URL and password from POST Request
URL = request.form.get('url')
password = request.form.get('password')
custom = request.form.get('custom')
# Check if custom alias is set, if not, generate one
if custom == '':
alias = gen_rand_alias(10)
else:
alias = custom
# If password is incorrect, Rick Roll
if password not in dbc.passwords or password is None:
return jsonify(error="Password was not recognized.")
# Create database connection
db = MySQLdb.connect(host=dbc.server, user=dbc.user, passwd=dbc.passwd, db=dbc.db)
cursor = db.cursor()
# Insert Redirect URL and Alias into database
cursor.execute("INSERT INTO " + dbc.urltbl + " (url, alias) VALUES (\'" + URL + "\', \'" + alias + "\');")
# Commit to database and close connections
db.commit()
cursor.close()
db.close()
return jsonify(url="http://frst.xyz/" + alias)
|
3347aaf8ad8fc1e016f1bf4159a91227cf8bc450 | billjobs/tests/tests_user_admin_api.py | billjobs/tests/tests_user_admin_api.py | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_anonymous_do_not_list_user(self):
request = self.factory.get('/billjobs/users/')
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| Test anonymous user do not access user list endpoint | Test anonymous user do not access user list endpoint
| Python | mit | ioO/billjobs | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Test anonymous user do not access user list endpoint | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_anonymous_do_not_list_user(self):
request = self.factory.get('/billjobs/users/')
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| <commit_before>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Test anonymous user do not access user list endpoint<commit_after> | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_anonymous_do_not_list_user(self):
request = self.factory.get('/billjobs/users/')
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Test anonymous user do not access user list endpointfrom django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_anonymous_do_not_list_user(self):
request = self.factory.get('/billjobs/users/')
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| <commit_before>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Test anonymous user do not access user list endpoint<commit_after>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_anonymous_do_not_list_user(self):
request = self.factory.get('/billjobs/users/')
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
bef6e3b13c2e524f606d6ef4157df93933548c22 | 11_first-try-except.py | 11_first-try-except.py | number = raw_input('Enter a number to find a square : ')
try :
actualNumber = int(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
| number = raw_input('Enter a number to find a square : ')
try :
# In order to accept floating numbers, we are converting the varibale to float.
actualNumber = float(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
| Convert variable from int to float, in order to accept floating value as well | Convert variable from int to float, in order to accept floating value as well
| Python | mit | rahulbohra/Python-Basic | number = raw_input('Enter a number to find a square : ')
try :
actualNumber = int(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
Convert variable from int to float, in order to accept floating value as well | number = raw_input('Enter a number to find a square : ')
try :
# In order to accept floating numbers, we are converting the varibale to float.
actualNumber = float(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
| <commit_before>number = raw_input('Enter a number to find a square : ')
try :
actualNumber = int(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
<commit_msg>Convert variable from int to float, in order to accept floating value as well<commit_after> | number = raw_input('Enter a number to find a square : ')
try :
# In order to accept floating numbers, we are converting the varibale to float.
actualNumber = float(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
| number = raw_input('Enter a number to find a square : ')
try :
actualNumber = int(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
Convert variable from int to float, in order to accept floating value as wellnumber = raw_input('Enter a number to find a square : ')
try :
# In order to accept floating numbers, we are converting the varibale to float.
actualNumber = float(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
| <commit_before>number = raw_input('Enter a number to find a square : ')
try :
actualNumber = int(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
<commit_msg>Convert variable from int to float, in order to accept floating value as well<commit_after>number = raw_input('Enter a number to find a square : ')
try :
# In order to accept floating numbers, we are converting the varibale to float.
actualNumber = float(number)**2
print 'Square of the number is', actualNumber
except :
print 'Instead of typing number you entered -', number
|
6e9e7e87efd220bb4f45172388161a7fc6f6d4ed | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 0.2.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
| Update RAML Cop version requirement to 1.0.0 | Update RAML Cop version requirement to 1.0.0
| Python | mit | thebinarypenguin/SublimeLinter-contrib-raml-cop | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 0.2.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
Update RAML Cop version requirement to 1.0.0 | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
| <commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 0.2.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
<commit_msg>Update RAML Cop version requirement to 1.0.0<commit_after> | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 0.2.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
Update RAML Cop version requirement to 1.0.0#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
| <commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 0.2.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
<commit_msg>Update RAML Cop version requirement to 1.0.0<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman,,,
# Copyright (c) 2014 Ethan Zimmerman,,,
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?:(?P<warning>WARNING)|(?P<error>ERROR)) '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
|
1bbb7d793b479e299f751e84a85f8bc9f40fc633 | microbower/__init__.py | microbower/__init__.py |
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
|
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
| Check for the asset directory once at the beginning | Check for the asset directory once at the beginning
| Python | isc | zenhack/microbower |
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
Check for the asset directory once at the beginning |
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
| <commit_before>
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
<commit_msg>Check for the asset directory once at the beginning<commit_after> |
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
|
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
Check for the asset directory once at the beginning
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
| <commit_before>
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
<commit_msg>Check for the asset directory once at the beginning<commit_after>
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
|
85384cb811c8e4cfdcaa1c207ac2274f352b86e9 | tensorflow_tutorials/__init__.py | tensorflow_tutorials/__init__.py | """
[Tutorials] for [TensorFlow].
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
| """
[Tutorials] for [TensorFlow].
[Source on GitHub][source].
[source]: https://github.com/rxedu/tensorflow-tutorials
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
| Add source link to docstring | Add source link to docstring
| Python | mit | rxedu/tensorflow-tutorials | """
[Tutorials] for [TensorFlow].
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
Add source link to docstring | """
[Tutorials] for [TensorFlow].
[Source on GitHub][source].
[source]: https://github.com/rxedu/tensorflow-tutorials
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
| <commit_before>"""
[Tutorials] for [TensorFlow].
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
<commit_msg>Add source link to docstring<commit_after> | """
[Tutorials] for [TensorFlow].
[Source on GitHub][source].
[source]: https://github.com/rxedu/tensorflow-tutorials
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
| """
[Tutorials] for [TensorFlow].
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
Add source link to docstring"""
[Tutorials] for [TensorFlow].
[Source on GitHub][source].
[source]: https://github.com/rxedu/tensorflow-tutorials
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
| <commit_before>"""
[Tutorials] for [TensorFlow].
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
<commit_msg>Add source link to docstring<commit_after>"""
[Tutorials] for [TensorFlow].
[Source on GitHub][source].
[source]: https://github.com/rxedu/tensorflow-tutorials
[Tutorials]: https://www.tensorflow.org/versions/r0.8/tutorials/index.html
[TensorFLow]: https://www.tensorflow.org/
"""
from .version import __version__
from .mnist import mnist
__all__ = [
'mnist'
]
|
c23f134cb8385919c8fe07136f978223ed229978 | micawber/cache.py | micawber/cache.py | from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename)) as fh:
contents = fh.read()
return pickle.loads(contents)
return {}
def save(self):
with closing(open(self.filename, 'w')) as fh:
fh.write(pickle.dumps(self._cache))
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
| from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename, 'rb')) as fh:
return pickle.load(fh)
return {}
def save(self):
with closing(open(self.filename, 'wb')) as fh:
pickle.dump(self._cache, fh)
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
| Fix PicleCache error on Python3 | Fix PicleCache error on Python3
| Python | mit | coleifer/micawber,coleifer/micawber | from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename)) as fh:
contents = fh.read()
return pickle.loads(contents)
return {}
def save(self):
with closing(open(self.filename, 'w')) as fh:
fh.write(pickle.dumps(self._cache))
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
Fix PicleCache error on Python3 | from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename, 'rb')) as fh:
return pickle.load(fh)
return {}
def save(self):
with closing(open(self.filename, 'wb')) as fh:
pickle.dump(self._cache, fh)
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
| <commit_before>from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename)) as fh:
contents = fh.read()
return pickle.loads(contents)
return {}
def save(self):
with closing(open(self.filename, 'w')) as fh:
fh.write(pickle.dumps(self._cache))
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
<commit_msg>Fix PicleCache error on Python3<commit_after> | from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename, 'rb')) as fh:
return pickle.load(fh)
return {}
def save(self):
with closing(open(self.filename, 'wb')) as fh:
pickle.dump(self._cache, fh)
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
| from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename)) as fh:
contents = fh.read()
return pickle.loads(contents)
return {}
def save(self):
with closing(open(self.filename, 'w')) as fh:
fh.write(pickle.dumps(self._cache))
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
Fix PicleCache error on Python3from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename, 'rb')) as fh:
return pickle.load(fh)
return {}
def save(self):
with closing(open(self.filename, 'wb')) as fh:
pickle.dump(self._cache, fh)
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
| <commit_before>from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename)) as fh:
contents = fh.read()
return pickle.loads(contents)
return {}
def save(self):
with closing(open(self.filename, 'w')) as fh:
fh.write(pickle.dumps(self._cache))
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
<commit_msg>Fix PicleCache error on Python3<commit_after>from __future__ import with_statement
import os
import pickle
from contextlib import closing
try:
from redis import Redis
except ImportError:
Redis = None
class Cache(object):
def __init__(self):
self._cache = {}
def get(self, k):
return self._cache.get(k)
def set(self, k, v):
self._cache[k] = v
class PickleCache(Cache):
def __init__(self, filename='cache.db'):
self.filename = filename
self._cache = self.load()
def load(self):
if os.path.exists(self.filename):
with closing(open(self.filename, 'rb')) as fh:
return pickle.load(fh)
return {}
def save(self):
with closing(open(self.filename, 'wb')) as fh:
pickle.dump(self._cache, fh)
if Redis:
class RedisCache(Cache):
def __init__(self, namespace='micawber', **conn):
self.namespace = namespace
self.key_fn = lambda self, k: '%s.%s' % (self.namespace, k)
self.conn = Redis(**conn)
def get(self, k):
cached = self.conn.get(self.key_fn(k))
if cached:
return pickle.loads(cached)
def set(self, k, v):
self.conn.set(self.key_fn(k), pickle.dumps(v))
|
81d2882d1558ed52fc70927d745474aa46ac1f3b | jarbas/dashboard/admin.py | jarbas/dashboard/admin.py | from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
| from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
readonly_fields = tuple(f.name for f in Reimbursement._meta.fields)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
| Mark all fields as read only in the dashboard | Mark all fields as read only in the dashboard
| Python | mit | datasciencebr/jarbas,datasciencebr/jarbas,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,datasciencebr/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor | from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
Mark all fields as read only in the dashboard | from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
readonly_fields = tuple(f.name for f in Reimbursement._meta.fields)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
| <commit_before>from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
<commit_msg>Mark all fields as read only in the dashboard<commit_after> | from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
readonly_fields = tuple(f.name for f in Reimbursement._meta.fields)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
| from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
Mark all fields as read only in the dashboardfrom django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
readonly_fields = tuple(f.name for f in Reimbursement._meta.fields)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
| <commit_before>from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
<commit_msg>Mark all fields as read only in the dashboard<commit_after>from django.contrib import admin
from jarbas.core.models import Reimbursement
class SuspiciousListFilter(admin.SimpleListFilter):
title = 'Is suspicious'
parameter_name = 'is_suspicions'
def lookups(self, request, model_admin):
return (
('yes', 'Yes'),
('no', 'No'),
)
def queryset(self, request, queryset):
return queryset.suspicions() if self.value() == 'yes' else queryset
class ReimbursementModelAdmin(admin.ModelAdmin):
list_display = (
'document_id',
'congressperson_name',
'year',
'subquota_description',
'supplier',
'cnpj_cpf',
'is_suspicious',
'total_net_value',
'available_in_latest_dataset',
)
search_fields = (
'applicant_id',
'cnpj_cpf',
'congressperson_name',
'document_id',
'party',
'state',
'supplier',
)
list_filter = (
SuspiciousListFilter,
'available_in_latest_dataset',
'year',
'state',
)
readonly_fields = tuple(f.name for f in Reimbursement._meta.fields)
def is_suspicious(self, obj):
return obj.suspicions is not None
is_suspicious.short_description = 'Suspicious'
is_suspicious.boolean = True
admin.site.register(Reimbursement, ReimbursementModelAdmin)
|
7d4d1afc5a42edb88f5cb8eb1347b79fdc131272 | src/actions/client.py | src/actions/client.py | from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
| from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
| Stop reactor and find files | Stop reactor and find files
| Python | mit | derwolfe/teiler,derwolfe/teiler | from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
Stop reactor and find files | from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
| <commit_before>from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
<commit_msg>Stop reactor and find files<commit_after> | from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
| from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
Stop reactor and find filesfrom twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
| <commit_before>from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
self.urls.append(link)
print link
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
# once you receive the message then add it to the
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# is the reactor done?
get_file_urls(fileserver)
if __name__ == '__main__':
main()
<commit_msg>Stop reactor and find files<commit_after>from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from twisted.application.internet import MulticastServer
from BeautifulSoup import BeautifulSoup, SoupStrainer
import requests
fileserver = ''
urls = []
def get_file_urls(self, url):
f = requests.get("http://" + url)
for link in BeautifulSoup(f, parseOnlyThese=SoupStrainer('a')):
urls.append(link)
def get_files():
pass
class MulticastClientUDP(DatagramProtocol):
def __init__(self):
self.host = '224.0.0.5'
def startProtocol(self):
# this could be placed in a config
self.transport.joinGroup(self.host)
def datagramReceived(self, datagram, address):
print "Received: " + repr(datagram)
fileserver = repr(datagram).replace("'", "")
# this will need more checking - it is killing the conn once it receives the address
self.transport.loseConnection()
reactor.stop()
def main():
print "Listening"
reactor.listenMulticast(8005,
MulticastClientUDP(),
listenMultiple = True)
reactor.run()
# reactor is closed at this point.
get_file_urls(fileserver)
if __name__ == '__main__':
main()
|
c11d0ec668a0755a9c5db2cb4dd372d8ab3e8a0d | .circleci/get_repos.py | .circleci/get_repos.py | from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
| from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
| Update with new metadata file system (removing use of ggd repo cloning) | Update with new metadata file system (removing use of ggd repo cloning)
| Python | mit | gogetdata/ggd-cli,gogetdata/ggd-cli | from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
Update with new metadata file system (removing use of ggd repo cloning) | from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
| <commit_before>from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
<commit_msg>Update with new metadata file system (removing use of ggd repo cloning)<commit_after> | from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
| from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
Update with new metadata file system (removing use of ggd repo cloning)from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
| <commit_before>from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
<commit_msg>Update with new metadata file system (removing use of ggd repo cloning)<commit_after>from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
|
90f306421f695a596bf4a0a2dce3b52c44145889 | zilencer/migrations/0001_initial.py | zilencer/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name=b'_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name='_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
| Replace bytes by strings in zilencer/migrations. | Replace bytes by strings in zilencer/migrations.
| Python | apache-2.0 | timabbott/zulip,vabs22/zulip,reyha/zulip,grave-w-grave/zulip,zulip/zulip,rht/zulip,andersk/zulip,punchagan/zulip,niftynei/zulip,KingxBanana/zulip,susansls/zulip,jphilipsen05/zulip,jainayush975/zulip,SmartPeople/zulip,isht3/zulip,jphilipsen05/zulip,Galexrt/zulip,souravbadami/zulip,paxapy/zulip,synicalsyntax/zulip,brainwane/zulip,jphilipsen05/zulip,tommyip/zulip,vikas-parashar/zulip,j831/zulip,dattatreya303/zulip,JPJPJPOPOP/zulip,blaze225/zulip,samatdav/zulip,blaze225/zulip,susansls/zulip,dhcrzf/zulip,grave-w-grave/zulip,vabs22/zulip,krtkmj/zulip,PhilSk/zulip,vabs22/zulip,sharmaeklavya2/zulip,dattatreya303/zulip,Juanvulcano/zulip,aakash-cr7/zulip,Jianchun1/zulip,jainayush975/zulip,verma-varsha/zulip,TigorC/zulip,tommyip/zulip,niftynei/zulip,AZtheAsian/zulip,cosmicAsymmetry/zulip,Galexrt/zulip,jackrzhang/zulip,j831/zulip,timabbott/zulip,christi3k/zulip,niftynei/zulip,niftynei/zulip,punchagan/zulip,sup95/zulip,kou/zulip,punchagan/zulip,mohsenSy/zulip,punchagan/zulip,brockwhittaker/zulip,aakash-cr7/zulip,tommyip/zulip,isht3/zulip,aakash-cr7/zulip,hackerkid/zulip,AZtheAsian/zulip,arpith/zulip,showell/zulip,zacps/zulip,joyhchen/zulip,Galexrt/zulip,andersk/zulip,reyha/zulip,synicalsyntax/zulip,showell/zulip,punchagan/zulip,jphilipsen05/zulip,christi3k/zulip,christi3k/zulip,AZtheAsian/zulip,JPJPJPOPOP/zulip,amyliu345/zulip,j831/zulip,sharmaeklavya2/zulip,brainwane/zulip,joyhchen/zulip,umkay/zulip,synicalsyntax/zulip,calvinleenyc/zulip,jainayush975/zulip,synicalsyntax/zulip,andersk/zulip,rht/zulip,kou/zulip,peguin40/zulip,brainwane/zulip,mohsenSy/zulip,showell/zulip,verma-varsha/zulip,timabbott/zulip,rht/zulip,KingxBanana/zulip,Jianchun1/zulip,christi3k/zulip,amyliu345/zulip,peguin40/zulip,brainwane/zulip,rht/zulip,TigorC/zulip,grave-w-grave/zulip,cosmicAsymmetry/zulip,PhilSk/zulip,jrowan/zulip,SmartPeople/zulip,niftynei/zulip,PhilSk/zulip,amanharitsh123/zulip,jackrzhang/zulip,vaidap/zulip,blaze225/zulip,umkay/zulip,souravbadami/zulip,samatdav/zulip,jp
hilipsen05/zulip,reyha/zulip,amyliu345/zulip,sonali0901/zulip,JPJPJPOPOP/zulip,blaze225/zulip,cosmicAsymmetry/zulip,arpith/zulip,ryanbackman/zulip,susansls/zulip,KingxBanana/zulip,vaidap/zulip,dawran6/zulip,jainayush975/zulip,brockwhittaker/zulip,Diptanshu8/zulip,kou/zulip,krtkmj/zulip,shubhamdhama/zulip,amyliu345/zulip,timabbott/zulip,SmartPeople/zulip,cosmicAsymmetry/zulip,Galexrt/zulip,ahmadassaf/zulip,sharmaeklavya2/zulip,dattatreya303/zulip,Jianchun1/zulip,krtkmj/zulip,grave-w-grave/zulip,SmartPeople/zulip,sup95/zulip,verma-varsha/zulip,jainayush975/zulip,ryanbackman/zulip,synicalsyntax/zulip,joyhchen/zulip,dhcrzf/zulip,dattatreya303/zulip,vaidap/zulip,rishig/zulip,cosmicAsymmetry/zulip,eeshangarg/zulip,arpith/zulip,shubhamdhama/zulip,aakash-cr7/zulip,amanharitsh123/zulip,JPJPJPOPOP/zulip,timabbott/zulip,rishig/zulip,jphilipsen05/zulip,vaidap/zulip,cosmicAsymmetry/zulip,sharmaeklavya2/zulip,zacps/zulip,tommyip/zulip,peguin40/zulip,brockwhittaker/zulip,jackrzhang/zulip,reyha/zulip,sonali0901/zulip,christi3k/zulip,showell/zulip,sonali0901/zulip,synicalsyntax/zulip,rishig/zulip,eeshangarg/zulip,amanharitsh123/zulip,vikas-parashar/zulip,rishig/zulip,showell/zulip,mahim97/zulip,mahim97/zulip,dattatreya303/zulip,TigorC/zulip,sonali0901/zulip,umkay/zulip,arpith/zulip,krtkmj/zulip,verma-varsha/zulip,hackerkid/zulip,krtkmj/zulip,krtkmj/zulip,brainwane/zulip,mohsenSy/zulip,isht3/zulip,dhcrzf/zulip,jackrzhang/zulip,vikas-parashar/zulip,susansls/zulip,mahim97/zulip,kou/zulip,verma-varsha/zulip,vikas-parashar/zulip,SmartPeople/zulip,brockwhittaker/zulip,kou/zulip,calvinleenyc/zulip,jrowan/zulip,ahmadassaf/zulip,verma-varsha/zulip,eeshangarg/zulip,samatdav/zulip,rishig/zulip,reyha/zulip,Diptanshu8/zulip,Diptanshu8/zulip,amyliu345/zulip,mahim97/zulip,ryanbackman/zulip,amanharitsh123/zulip,PhilSk/zulip,Jianchun1/zulip,ahmadassaf/zulip,grave-w-grave/zulip,zacps/zulip,brainwane/zulip,mohsenSy/zulip,hackerkid/zulip,jrowan/zulip,rht/zulip,dhcrzf/zulip,brockwhittaker/zulip,zulip/zu
lip,tommyip/zulip,vaidap/zulip,mahim97/zulip,sharmaeklavya2/zulip,j831/zulip,punchagan/zulip,tommyip/zulip,dattatreya303/zulip,shubhamdhama/zulip,hackerkid/zulip,shubhamdhama/zulip,niftynei/zulip,jackrzhang/zulip,dhcrzf/zulip,brockwhittaker/zulip,rishig/zulip,jackrzhang/zulip,calvinleenyc/zulip,synicalsyntax/zulip,peguin40/zulip,TigorC/zulip,blaze225/zulip,amanharitsh123/zulip,mohsenSy/zulip,TigorC/zulip,amanharitsh123/zulip,joyhchen/zulip,JPJPJPOPOP/zulip,brainwane/zulip,paxapy/zulip,zulip/zulip,hackerkid/zulip,ryanbackman/zulip,dawran6/zulip,andersk/zulip,TigorC/zulip,kou/zulip,AZtheAsian/zulip,sup95/zulip,aakash-cr7/zulip,SmartPeople/zulip,Diptanshu8/zulip,jackrzhang/zulip,sup95/zulip,calvinleenyc/zulip,isht3/zulip,souravbadami/zulip,kou/zulip,j831/zulip,andersk/zulip,Galexrt/zulip,PhilSk/zulip,Juanvulcano/zulip,isht3/zulip,zulip/zulip,paxapy/zulip,andersk/zulip,sharmaeklavya2/zulip,peguin40/zulip,AZtheAsian/zulip,punchagan/zulip,samatdav/zulip,samatdav/zulip,mohsenSy/zulip,ahmadassaf/zulip,showell/zulip,ryanbackman/zulip,paxapy/zulip,sup95/zulip,isht3/zulip,tommyip/zulip,eeshangarg/zulip,calvinleenyc/zulip,vikas-parashar/zulip,zacps/zulip,zacps/zulip,ryanbackman/zulip,paxapy/zulip,timabbott/zulip,Juanvulcano/zulip,zulip/zulip,jrowan/zulip,Juanvulcano/zulip,ahmadassaf/zulip,KingxBanana/zulip,samatdav/zulip,JPJPJPOPOP/zulip,KingxBanana/zulip,rht/zulip,umkay/zulip,shubhamdhama/zulip,calvinleenyc/zulip,peguin40/zulip,KingxBanana/zulip,krtkmj/zulip,dawran6/zulip,Jianchun1/zulip,sonali0901/zulip,zulip/zulip,souravbadami/zulip,eeshangarg/zulip,sonali0901/zulip,andersk/zulip,ahmadassaf/zulip,Diptanshu8/zulip,souravbadami/zulip,zacps/zulip,vabs22/zulip,susansls/zulip,j831/zulip,zulip/zulip,amyliu345/zulip,christi3k/zulip,eeshangarg/zulip,dawran6/zulip,hackerkid/zulip,shubhamdhama/zulip,vikas-parashar/zulip,PhilSk/zulip,Diptanshu8/zulip,dawran6/zulip,umkay/zulip,joyhchen/zulip,jrowan/zulip,vaidap/zulip,jrowan/zulip,shubhamdhama/zulip,Galexrt/zulip,dhcrzf/zulip,Juanvulcano
/zulip,dhcrzf/zulip,susansls/zulip,ahmadassaf/zulip,blaze225/zulip,arpith/zulip,umkay/zulip,rishig/zulip,aakash-cr7/zulip,vabs22/zulip,grave-w-grave/zulip,jainayush975/zulip,rht/zulip,showell/zulip,Juanvulcano/zulip,vabs22/zulip,eeshangarg/zulip,reyha/zulip,joyhchen/zulip,mahim97/zulip,souravbadami/zulip,timabbott/zulip,umkay/zulip,sup95/zulip,paxapy/zulip,arpith/zulip,Galexrt/zulip,dawran6/zulip,hackerkid/zulip,AZtheAsian/zulip,Jianchun1/zulip | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name=b'_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
Replace bytes by strings in zilencer/migrations. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name='_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name=b'_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
<commit_msg>Replace bytes by strings in zilencer/migrations.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name='_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name=b'_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
Replace bytes by strings in zilencer/migrations.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name='_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name=b'_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
<commit_msg>Replace bytes by strings in zilencer/migrations.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Deployment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_active', models.BooleanField(default=True)),
('api_key', models.CharField(max_length=32, null=True)),
('base_api_url', models.CharField(max_length=128)),
('base_site_url', models.CharField(max_length=128)),
('realms', models.ManyToManyField(related_name='_deployments', to='zerver.Realm')),
],
options={
},
bases=(models.Model,),
),
]
|
1c727e878402ffacf14c2978860d7d555c5f4069 | zoe_lib/predefined_apps/__init__.py | zoe_lib/predefined_apps/__init__.py | # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
| # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
| Fix import error due to wrong import line | Fix import error due to wrong import line
| Python | apache-2.0 | DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe | # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
Fix import error due to wrong import line | # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
| <commit_before># Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
<commit_msg>Fix import error due to wrong import line<commit_after> | # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
| # Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
Fix import error due to wrong import line# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
| <commit_before># Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
<commit_msg>Fix import error due to wrong import line<commit_after># Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app
PREDEFINED_APPS = [
copier_app,
spark_jupyter_notebook_app,
spark_jupyter_notebook_lab_app,
hdfs_app,
openmpi_app,
spark_submit_app,
sleeper_app
]
|
1c5af88a0689aadab4069f9f2ad16164791624b3 | Discord/utilities/errors.py | Discord/utilities/errors.py |
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
| Remove no longer used Missing Capability error | [Discord] Remove no longer used Missing Capability error
| Python | mit | Harmon758/Harmonbot,Harmon758/Harmonbot |
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
[Discord] Remove no longer used Missing Capability error |
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
| <commit_before>
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
<commit_msg>[Discord] Remove no longer used Missing Capability error<commit_after> |
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
[Discord] Remove no longer used Missing Capability error
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
| <commit_before>
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class MissingCapability(CommandError):
'''Missing Capability'''
def __init__(self, permissions):
self.permissions = permissions
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
<commit_msg>[Discord] Remove no longer used Missing Capability error<commit_after>
from discord.ext.commands.errors import CommandError
class NotServerOwner(CommandError):
'''Not Server Owner'''
pass
class VoiceNotConnected(CommandError):
'''Voice Not Connected'''
pass
class PermittedVoiceNotConnected(VoiceNotConnected):
'''Permitted, but Voice Not Connected'''
pass
class NotPermittedVoiceNotConnected(VoiceNotConnected):
'''Voice Not Connected, and Not Permitted'''
pass
class MissingPermissions(CommandError):
'''Missing Permissions'''
pass
class NotPermitted(CommandError):
'''Not Permitted'''
pass
class AudioError(CommandError):
'''Audio Error'''
pass
|
fb5c2e5df4f700fb19663bbe96e7aa2710e627ca | osprey/execute_dump.py | osprey/execute_dump.py | from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
value = json.dumps(items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
| from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
new_items = []
# Instead of saving the parameters on their own nested dict,
# save them along the rest of elements
for item in items:
parameters = item.pop('parameters') # remove dict
item.update(parameters) # update original dict with the parameters
new_items.append(item)
value = json.dumps(new_items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
| Store hyperparameters with the other settings | Store hyperparameters with the other settings
Instead of storing them in their own 'parameters' directory. | Python | apache-2.0 | msmbuilder/osprey,msultan/osprey,pandegroup/osprey,msultan/osprey,msmbuilder/osprey,pandegroup/osprey | from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
value = json.dumps(items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
Store hyperparameters with the other settings
Instead of storing them in their own 'parameters' directory. | from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
new_items = []
# Instead of saving the parameters on their own nested dict,
# save them along the rest of elements
for item in items:
parameters = item.pop('parameters') # remove dict
item.update(parameters) # update original dict with the parameters
new_items.append(item)
value = json.dumps(new_items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
| <commit_before>from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
value = json.dumps(items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
<commit_msg>Store hyperparameters with the other settings
Instead of storing them in their own 'parameters' directory. <commit_after> | from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
new_items = []
# Instead of saving the parameters on their own nested dict,
# save them along the rest of elements
for item in items:
parameters = item.pop('parameters') # remove dict
item.update(parameters) # update original dict with the parameters
new_items.append(item)
value = json.dumps(new_items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
| from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
value = json.dumps(items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
Store hyperparameters with the other settings
Instead of storing them in their own 'parameters' directory. from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
new_items = []
# Instead of saving the parameters on their own nested dict,
# save them along the rest of elements
for item in items:
parameters = item.pop('parameters') # remove dict
item.update(parameters) # update original dict with the parameters
new_items.append(item)
value = json.dumps(new_items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
| <commit_before>from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
value = json.dumps(items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
<commit_msg>Store hyperparameters with the other settings
Instead of storing them in their own 'parameters' directory. <commit_after>from __future__ import print_function, absolute_import, division
import csv
import json
from six.moves import cStringIO
from .config import Config
from .trials import Trial
def execute(args, parser):
config = Config(args.config, verbose=False)
session = config.trials()
columns = Trial.__mapper__.columns
if args.output == 'json':
items = [curr.to_dict() for curr in session.query(Trial).all()]
new_items = []
# Instead of saving the parameters on their own nested dict,
# save them along the rest of elements
for item in items:
parameters = item.pop('parameters') # remove dict
item.update(parameters) # update original dict with the parameters
new_items.append(item)
value = json.dumps(new_items)
elif args.output == 'csv':
buf = cStringIO()
outcsv = csv.writer(buf)
outcsv.writerow([column.name for column in columns])
for curr in session.query(Trial).all():
row = [getattr(curr, column.name) for column in columns]
outcsv.writerow(row)
value = buf.getvalue()
print(value)
return value
|
0b13092a7854fe2d967d057221420a57b7a37b16 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| Change module docstring to make Travis CI build pass | Change module docstring to make Travis CI build pass
| Python | mit | jackbrewer/SublimeLinter-contrib-stylint | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
Change module docstring to make Travis CI build pass | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| <commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
<commit_msg>Change module docstring to make Travis CI build pass<commit_after> | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
Change module docstring to make Travis CI build pass#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| <commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
<commit_msg>Change module docstring to make Travis CI build pass<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
9037c6c67add92304b6cfdbfb3a79ac1b3e9e64e | test/checker/test_checker_binary.py | test/checker/test_checker_binary.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| Fix test cases for Python2 | Fix test cases for Python2
| Python | mit | thombashi/typepy | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
Fix test cases for Python2 | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
<commit_msg>Fix test cases for Python2<commit_after> | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
Fix test cases for Python2# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
<commit_msg>Fix test cases for Python2<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
|
8af1f7a0525f69a6e2ee6c5cfd7d6a923873a7ec | froide/helper/auth.py | froide/helper/auth.py | from django.contrib.auth.backends import ModelBackend
from django.core.validators import email_re
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if email_re.search(username):
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
| from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
validate_email(username)
except ValidationError:
return None
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
| Validate email the correct way | Validate email the correct way | Python | mit | catcosmo/froide,ryankanno/froide,okfse/froide,fin/froide,LilithWittmann/froide,fin/froide,ryankanno/froide,CodeforHawaii/froide,stefanw/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,LilithWittmann/froide,okfse/froide,stefanw/froide,CodeforHawaii/froide,catcosmo/froide,CodeforHawaii/froide,ryankanno/froide,ryankanno/froide,stefanw/froide,fin/froide,catcosmo/froide,LilithWittmann/froide,CodeforHawaii/froide,CodeforHawaii/froide,fin/froide,stefanw/froide,okfse/froide,stefanw/froide,okfse/froide,LilithWittmann/froide,catcosmo/froide,catcosmo/froide | from django.contrib.auth.backends import ModelBackend
from django.core.validators import email_re
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if email_re.search(username):
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
Validate email the correct way | from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
validate_email(username)
except ValidationError:
return None
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
| <commit_before>from django.contrib.auth.backends import ModelBackend
from django.core.validators import email_re
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if email_re.search(username):
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
<commit_msg>Validate email the correct way<commit_after> | from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
validate_email(username)
except ValidationError:
return None
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
| from django.contrib.auth.backends import ModelBackend
from django.core.validators import email_re
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if email_re.search(username):
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
Validate email the correct wayfrom django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
validate_email(username)
except ValidationError:
return None
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
| <commit_before>from django.contrib.auth.backends import ModelBackend
from django.core.validators import email_re
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if email_re.search(username):
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
<commit_msg>Validate email the correct way<commit_after>from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth import models, load_backend, login
from django.conf import settings
class EmailBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
validate_email(username)
except ValidationError:
return None
try:
user = models.User.objects.get(email=username)
if user.check_password(password):
return user
except models.User.DoesNotExist:
return None
return None
def login_user(request, user):
if not hasattr(user, 'backend'):
for backend in settings.AUTHENTICATION_BACKENDS:
if user == load_backend(backend).get_user(user.pk):
user.backend = backend
break
if hasattr(user, 'backend'):
return login(request, user)
|
4ced26baaf25df6211940fa87ce7eaa9eee776f4 | src/judge/compiler.py | src/judge/compiler.py | import hashlib
import os
import subprocess
class CompilationError(Exception):
def __init__(self,e):
self.e = e
def __str__(self):
return repr(self.e)
def compile(f):
filename_with_extension = os.path.basename(f)
name_of_binary_executable = hashlib.md5(open(f,'r').read().encode('utf-8')).hexdigest()
try:
result = subprocess.run(["gcc",filename_with_extension,"-o",name_of_binary_executable],stdout=subprocess.PIPE,stderr=subprocess.PIPE,check=True)
except subprocess.CalledProcessError as e:
raise CompilationError(e)
| Add basic compiling ability capable of throwing 'CompilationError'. | judge: Add basic compiling ability capable of throwing 'CompilationError'.
| Python | mit | basu96/crux-judge,CRUx-BPHC/crux-judge,basu96/crux-judge,CRUx-BPHC/crux-judge,CRUx-BPHC/crux-judge,basu96/crux-judge,basu96/crux-judge,basu96/crux-judge | judge: Add basic compiling ability capable of throwing 'CompilationError'. | import hashlib
import os
import subprocess
class CompilationError(Exception):
def __init__(self,e):
self.e = e
def __str__(self):
return repr(self.e)
def compile(f):
filename_with_extension = os.path.basename(f)
name_of_binary_executable = hashlib.md5(open(f,'r').read().encode('utf-8')).hexdigest()
try:
result = subprocess.run(["gcc",filename_with_extension,"-o",name_of_binary_executable],stdout=subprocess.PIPE,stderr=subprocess.PIPE,check=True)
except subprocess.CalledProcessError as e:
raise CompilationError(e)
| <commit_before><commit_msg>judge: Add basic compiling ability capable of throwing 'CompilationError'.<commit_after> | import hashlib
import os
import subprocess
class CompilationError(Exception):
def __init__(self,e):
self.e = e
def __str__(self):
return repr(self.e)
def compile(f):
filename_with_extension = os.path.basename(f)
name_of_binary_executable = hashlib.md5(open(f,'r').read().encode('utf-8')).hexdigest()
try:
result = subprocess.run(["gcc",filename_with_extension,"-o",name_of_binary_executable],stdout=subprocess.PIPE,stderr=subprocess.PIPE,check=True)
except subprocess.CalledProcessError as e:
raise CompilationError(e)
| judge: Add basic compiling ability capable of throwing 'CompilationError'.import hashlib
import os
import subprocess
class CompilationError(Exception):
def __init__(self,e):
self.e = e
def __str__(self):
return repr(self.e)
def compile(f):
filename_with_extension = os.path.basename(f)
name_of_binary_executable = hashlib.md5(open(f,'r').read().encode('utf-8')).hexdigest()
try:
result = subprocess.run(["gcc",filename_with_extension,"-o",name_of_binary_executable],stdout=subprocess.PIPE,stderr=subprocess.PIPE,check=True)
except subprocess.CalledProcessError as e:
raise CompilationError(e)
| <commit_before><commit_msg>judge: Add basic compiling ability capable of throwing 'CompilationError'.<commit_after>import hashlib
import os
import subprocess
class CompilationError(Exception):
def __init__(self,e):
self.e = e
def __str__(self):
return repr(self.e)
def compile(f):
filename_with_extension = os.path.basename(f)
name_of_binary_executable = hashlib.md5(open(f,'r').read().encode('utf-8')).hexdigest()
try:
result = subprocess.run(["gcc",filename_with_extension,"-o",name_of_binary_executable],stdout=subprocess.PIPE,stderr=subprocess.PIPE,check=True)
except subprocess.CalledProcessError as e:
raise CompilationError(e)
| |
3e6798113d3f1ddc08f4db7d65f3130ea2211dd7 | nursereg/admin.py | nursereg/admin.py | from django.contrib import admin
from .models import NurseSource, NurseReg
admin.site.register(NurseSource)
admin.site.register(NurseReg)
| from django.contrib import admin
from control.utils import CsvExportAdminMixin
from .models import NurseSource, NurseReg
class NurseRegAdmin(CsvExportAdminMixin, admin.ModelAdmin):
csv_header = [
'cmsisdn', 'dmsisdn', 'rmsisdn', 'faccode',
'id_type', 'id_no', 'passport_origin', 'dob',
'nurse_source', 'persal_no', 'opted_out',
'optout_reason', 'optout_count', 'sanc_reg_no',
'created_at', 'updated_at']
def clean_csv_line(self, model):
return [
model.cmsisdn, model.dmsisdn, model.rmsisdn, model.faccode,
model.id_type, model.id_no, model.passport_origin, model.dob,
model.nurse_source, model.persal_no, model.opted_out,
model.optout_reason, model.optout_count, model.sanc_reg_no,
model.created_at, model.updated_at]
admin.site.register(NurseSource)
admin.site.register(NurseReg, NurseRegAdmin)
| Add export inline for Nurse Registrations | Add export inline for Nurse Registrations
| Python | bsd-3-clause | praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control | from django.contrib import admin
from .models import NurseSource, NurseReg
admin.site.register(NurseSource)
admin.site.register(NurseReg)
Add export inline for Nurse Registrations | from django.contrib import admin
from control.utils import CsvExportAdminMixin
from .models import NurseSource, NurseReg
class NurseRegAdmin(CsvExportAdminMixin, admin.ModelAdmin):
csv_header = [
'cmsisdn', 'dmsisdn', 'rmsisdn', 'faccode',
'id_type', 'id_no', 'passport_origin', 'dob',
'nurse_source', 'persal_no', 'opted_out',
'optout_reason', 'optout_count', 'sanc_reg_no',
'created_at', 'updated_at']
def clean_csv_line(self, model):
return [
model.cmsisdn, model.dmsisdn, model.rmsisdn, model.faccode,
model.id_type, model.id_no, model.passport_origin, model.dob,
model.nurse_source, model.persal_no, model.opted_out,
model.optout_reason, model.optout_count, model.sanc_reg_no,
model.created_at, model.updated_at]
admin.site.register(NurseSource)
admin.site.register(NurseReg, NurseRegAdmin)
| <commit_before>from django.contrib import admin
from .models import NurseSource, NurseReg
admin.site.register(NurseSource)
admin.site.register(NurseReg)
<commit_msg>Add export inline for Nurse Registrations<commit_after> | from django.contrib import admin
from control.utils import CsvExportAdminMixin
from .models import NurseSource, NurseReg
class NurseRegAdmin(CsvExportAdminMixin, admin.ModelAdmin):
csv_header = [
'cmsisdn', 'dmsisdn', 'rmsisdn', 'faccode',
'id_type', 'id_no', 'passport_origin', 'dob',
'nurse_source', 'persal_no', 'opted_out',
'optout_reason', 'optout_count', 'sanc_reg_no',
'created_at', 'updated_at']
def clean_csv_line(self, model):
return [
model.cmsisdn, model.dmsisdn, model.rmsisdn, model.faccode,
model.id_type, model.id_no, model.passport_origin, model.dob,
model.nurse_source, model.persal_no, model.opted_out,
model.optout_reason, model.optout_count, model.sanc_reg_no,
model.created_at, model.updated_at]
admin.site.register(NurseSource)
admin.site.register(NurseReg, NurseRegAdmin)
| from django.contrib import admin
from .models import NurseSource, NurseReg
admin.site.register(NurseSource)
admin.site.register(NurseReg)
Add export inline for Nurse Registrationsfrom django.contrib import admin
from control.utils import CsvExportAdminMixin
from .models import NurseSource, NurseReg
class NurseRegAdmin(CsvExportAdminMixin, admin.ModelAdmin):
csv_header = [
'cmsisdn', 'dmsisdn', 'rmsisdn', 'faccode',
'id_type', 'id_no', 'passport_origin', 'dob',
'nurse_source', 'persal_no', 'opted_out',
'optout_reason', 'optout_count', 'sanc_reg_no',
'created_at', 'updated_at']
def clean_csv_line(self, model):
return [
model.cmsisdn, model.dmsisdn, model.rmsisdn, model.faccode,
model.id_type, model.id_no, model.passport_origin, model.dob,
model.nurse_source, model.persal_no, model.opted_out,
model.optout_reason, model.optout_count, model.sanc_reg_no,
model.created_at, model.updated_at]
admin.site.register(NurseSource)
admin.site.register(NurseReg, NurseRegAdmin)
| <commit_before>from django.contrib import admin
from .models import NurseSource, NurseReg
admin.site.register(NurseSource)
admin.site.register(NurseReg)
<commit_msg>Add export inline for Nurse Registrations<commit_after>from django.contrib import admin
from control.utils import CsvExportAdminMixin
from .models import NurseSource, NurseReg
class NurseRegAdmin(CsvExportAdminMixin, admin.ModelAdmin):
csv_header = [
'cmsisdn', 'dmsisdn', 'rmsisdn', 'faccode',
'id_type', 'id_no', 'passport_origin', 'dob',
'nurse_source', 'persal_no', 'opted_out',
'optout_reason', 'optout_count', 'sanc_reg_no',
'created_at', 'updated_at']
def clean_csv_line(self, model):
return [
model.cmsisdn, model.dmsisdn, model.rmsisdn, model.faccode,
model.id_type, model.id_no, model.passport_origin, model.dob,
model.nurse_source, model.persal_no, model.opted_out,
model.optout_reason, model.optout_count, model.sanc_reg_no,
model.created_at, model.updated_at]
admin.site.register(NurseSource)
admin.site.register(NurseReg, NurseRegAdmin)
|
810450d36084094c633646b14e123be8ae0e3602 | src/services/TemperatureMonitor/src/TemperatureMonitor.py | src/services/TemperatureMonitor/src/TemperatureMonitor.py | import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.append(self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
| import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.insert(0,self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
| Add New Temps to Beginning | Add New Temps to Beginning
| Python | mit | IAPark/PITherm | import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.append(self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
Add New Temps to Beginning | import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.insert(0,self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
| <commit_before>import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.append(self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
<commit_msg>Add New Temps to Beginning<commit_after> | import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.insert(0,self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
| import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.append(self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
Add New Temps to Beginningimport time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.insert(0,self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
| <commit_before>import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.append(self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
<commit_msg>Add New Temps to Beginning<commit_after>import time
import requests
class TemperatureMonitor:
def __init__(self, temperature_sensor, interval=60, smoothing=5, observers=()):
self.temperature_sensor = temperature_sensor
self.interval = interval
self.smoothing = smoothing
self.observers = observers
self.history = []
def run(self):
while True:
self.check_temp()
time.sleep(self.interval)
def check_temp(self):
self.history = self.history[:self.smoothing-1]
self.history.insert(0,self.temperature_sensor.get_temp())
sum = 0.0
for temp in self.history:
sum += temp
average = sum / len(self.history)
self.report(average)
def report(self, temp):
for observer in self.observers:
requests.post(observer, json={"temp": temp})
|
8bd3b1eb25d2207e33cd8970ac2cf739c983e191 | properties/__init__.py | properties/__init__.py | """Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name!', required=True)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
from .images import (
ImagePNG
)
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
| """Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name?')
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
# Attempt to import image classes. Requires:
# >> pip install properties[image]
# or
# >> pip install properties[full]
try:
from .images import (
ImagePNG
)
except ImportError:
pass
# Attempt to import math/array classes. Requires:
# >> pip install properties[math]
# or
# >> pip install properties[full]
try:
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
except ImportError:
pass
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
| Modify init to only import available modules | Modify init to only import available modules
| Python | mit | aranzgeo/properties,3ptscience/properties | """Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name!', required=True)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
from .images import (
ImagePNG
)
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
Modify init to only import available modules | """Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name?')
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
# Attempt to import image classes. Requires:
# >> pip install properties[image]
# or
# >> pip install properties[full]
try:
from .images import (
ImagePNG
)
except ImportError:
pass
# Attempt to import math/array classes. Requires:
# >> pip install properties[math]
# or
# >> pip install properties[full]
try:
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
except ImportError:
pass
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
| <commit_before>"""Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name!', required=True)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
from .images import (
ImagePNG
)
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
<commit_msg>Modify init to only import available modules<commit_after> | """Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name?')
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
# Attempt to import image classes. Requires:
# >> pip install properties[image]
# or
# >> pip install properties[full]
try:
from .images import (
ImagePNG
)
except ImportError:
pass
# Attempt to import math/array classes. Requires:
# >> pip install properties[math]
# or
# >> pip install properties[full]
try:
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
except ImportError:
pass
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
| """Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name!', required=True)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
from .images import (
ImagePNG
)
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
Modify init to only import available modules"""Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name?')
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
# Attempt to import image classes. Requires:
# >> pip install properties[image]
# or
# >> pip install properties[full]
try:
from .images import (
ImagePNG
)
except ImportError:
pass
# Attempt to import math/array classes. Requires:
# >> pip install properties[math]
# or
# >> pip install properties[full]
try:
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
except ImportError:
pass
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
| <commit_before>"""Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name!', required=True)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
from .images import (
ImagePNG
)
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
<commit_msg>Modify init to only import available modules<commit_after>"""Properties
Giving structure (and documentation!) to the properties you use in your
code avoids confusion and allows users to interact flexibly and provide
multiple styles of input, have those inputs validated, and allow you as a
developer to set expectations for what you want to work with.
import properties
class Profile(properties.HasProperties):
name = properties.String('What is your name?')
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .base import (
HasProperties,
Instance,
List,
Union,
)
from .basic import (
Bool,
Color,
Complex,
DateTime,
Float,
GettableProperty,
Integer,
Property,
String,
StringChoice,
Uuid,
)
# Attempt to import image classes. Requires:
# >> pip install properties[image]
# or
# >> pip install properties[full]
try:
from .images import (
ImagePNG
)
except ImportError:
pass
# Attempt to import math/array classes. Requires:
# >> pip install properties[math]
# or
# >> pip install properties[full]
try:
from .math import (
Array,
Vector2,
Vector2Array,
Vector3,
Vector3Array,
)
except ImportError:
pass
from .utils import defaults, filter_props, undefined
from .handlers import observer, validator
from . import task
__version__ = '0.2.3'
__author__ = '3point Science'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 3point Science,'
|
6cac0b8531297dab6bdaff2959646d5a8a90dd01 | parse_vcfFile.py | parse_vcfFile.py | import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
return snps | import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
snps.index = snps.ID
snps = snps.iloc[:,9:].T
return snps | Update VCF parsing to output SNP-column sample-row DataFrame | Update VCF parsing to output SNP-column sample-row DataFrame
| Python | mit | NCBI-Hackathons/Network_Stats_Acc_Interop,NCBI-Hackathons/Network_Stats_Acc_Interop,NCBI-Hackathons/Network_Stats_Acc_Interop,NCBI-Hackathons/Network_Stats_Acc_Interop | import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
return snpsUpdate VCF parsing to output SNP-column sample-row DataFrame | import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
snps.index = snps.ID
snps = snps.iloc[:,9:].T
return snps | <commit_before>import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
return snps<commit_msg>Update VCF parsing to output SNP-column sample-row DataFrame<commit_after> | import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
snps.index = snps.ID
snps = snps.iloc[:,9:].T
return snps | import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
return snpsUpdate VCF parsing to output SNP-column sample-row DataFrameimport pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
snps.index = snps.ID
snps = snps.iloc[:,9:].T
return snps | <commit_before>import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
return snps<commit_msg>Update VCF parsing to output SNP-column sample-row DataFrame<commit_after>import pandas
def read_vcf(filename):
"""
Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample.
:param filename: Path to VCF file
:return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples
"""
vcf = open(filename)
for l in vcf:
if not l.startswith('##'):
header = l.strip().split('\t')
break
snps = pandas.read_table(vcf, names=header)
snps.index = snps.ID
snps = snps.iloc[:,9:].T
return snps |
72bb5cb08d7d0b708b9078b49b8ca8a068d514b9 | account_verification_flask/config.py | account_verification_flask/config.py | class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite:////Work/account_verification.db'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
| class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
| Set default Db to inmemory storage | Set default Db to inmemory storage
| Python | mit | TwilioDevEd/account-verification-flask,TwilioDevEd/account-verification-flask,TwilioDevEd/account-verification-flask | class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite:////Work/account_verification.db'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
Set default Db to inmemory storage | class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
| <commit_before>class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite:////Work/account_verification.db'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
<commit_msg>Set default Db to inmemory storage<commit_after> | class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
| class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite:////Work/account_verification.db'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
Set default Db to inmemory storageclass DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
| <commit_before>class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite:////Work/account_verification.db'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
<commit_msg>Set default Db to inmemory storage<commit_after>class DefaultConfig(object):
SECRET_KEY = '%^!@@*!&$8xdfdirunb52438#(&^874@#^&*($@*(@&^@)(&*)Y_)((+'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class DevelopmentConfig(DefaultConfig):
AUTHY_KEY = 'your_authy_key'
TWILIO_ACCOUNT_SID = 'your_twilio_account_sid'
TWILIO_AUTH_TOKEN = 'your_twilio_auth_token'
TWILIO_NUMBER = 'your_twilio_phone_number'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
DEBUG = True
class TestConfig(DefaultConfig):
SQLALCHEMY_ECHO = True
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
config_env_files = {
'test': 'account_verification_flask.config.TestConfig',
'development': 'account_verification_flask.config.DevelopmentConfig',
}
|
08dcf4de96c6f8890e467e4d17c9449bae8064be | plugins/debug.py | plugins/debug.py | import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"echo (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
| import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"(?:echo|say) (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
| Add say alias to echo | Add say alias to echo
| Python | mit | tomleese/smartbot,Cyanogenoid/smartbot,Muzer/smartbot,thomasleese/smartbot-old | import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"echo (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
Add say alias to echo | import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"(?:echo|say) (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
| <commit_before>import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"echo (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
<commit_msg>Add say alias to echo<commit_after> | import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"(?:echo|say) (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
| import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"echo (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
Add say alias to echoimport time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"(?:echo|say) (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
| <commit_before>import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"echo (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
<commit_msg>Add say alias to echo<commit_after>import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"(?:echo|say) (.*)$", lambda bot, msg, reply: reply(msg["match"][0]))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
|
38b4af0b3c1c6105d68ff453d86107758ef9d751 | preconditions.py | preconditions.py | class PreconditionError (TypeError):
pass
def preconditions(*precs):
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
| import inspect
class PreconditionError (TypeError):
pass
def preconditions(*precs):
precinfo = []
for p in precs:
spec = inspect.getargspec(p)
if spec.varargs or spec.keywords:
raise PreconditionError(
'Precondition {!r} must not accept * nor ** args.'.format(p))
i = -len(spec.defaults)
appargs, closureargs = spec.args[:i], spec.args[i:]
precinfo.append( (appargs, closureargs, p) )
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
| Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function. | Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.
| Python | mit | nejucomo/preconditions | class PreconditionError (TypeError):
pass
def preconditions(*precs):
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function. | import inspect
class PreconditionError (TypeError):
pass
def preconditions(*precs):
precinfo = []
for p in precs:
spec = inspect.getargspec(p)
if spec.varargs or spec.keywords:
raise PreconditionError(
'Precondition {!r} must not accept * nor ** args.'.format(p))
i = -len(spec.defaults)
appargs, closureargs = spec.args[:i], spec.args[i:]
precinfo.append( (appargs, closureargs, p) )
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
| <commit_before>class PreconditionError (TypeError):
pass
def preconditions(*precs):
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
<commit_msg>Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.<commit_after> | import inspect
class PreconditionError (TypeError):
pass
def preconditions(*precs):
precinfo = []
for p in precs:
spec = inspect.getargspec(p)
if spec.varargs or spec.keywords:
raise PreconditionError(
'Precondition {!r} must not accept * nor ** args.'.format(p))
i = -len(spec.defaults)
appargs, closureargs = spec.args[:i], spec.args[i:]
precinfo.append( (appargs, closureargs, p) )
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
| class PreconditionError (TypeError):
pass
def preconditions(*precs):
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.import inspect
class PreconditionError (TypeError):
pass
def preconditions(*precs):
precinfo = []
for p in precs:
spec = inspect.getargspec(p)
if spec.varargs or spec.keywords:
raise PreconditionError(
'Precondition {!r} must not accept * nor ** args.'.format(p))
i = -len(spec.defaults)
appargs, closureargs = spec.args[:i], spec.args[i:]
precinfo.append( (appargs, closureargs, p) )
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
| <commit_before>class PreconditionError (TypeError):
pass
def preconditions(*precs):
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
<commit_msg>Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.<commit_after>import inspect
class PreconditionError (TypeError):
pass
def preconditions(*precs):
precinfo = []
for p in precs:
spec = inspect.getargspec(p)
if spec.varargs or spec.keywords:
raise PreconditionError(
'Precondition {!r} must not accept * nor ** args.'.format(p))
i = -len(spec.defaults)
appargs, closureargs = spec.args[:i], spec.args[i:]
precinfo.append( (appargs, closureargs, p) )
def decorate(f):
def g(*a, **kw):
return f(*a, **kw)
return g
return decorate
|
634442dc25385831ce8f165ab8d9d58493a1979a | pinry/pins/views.py | pinry/pins/views.py | from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args : lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
| from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args: lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
| Fix a small styling error in the lazy_resolve definition | Fix a small styling error in the lazy_resolve definition
| Python | bsd-2-clause | QLGu/pinry,MSylvia/pinry,Stackato-Apps/pinry,pinry/pinry,MSylvia/pinry,lapo-luchini/pinry,supervacuo/pinry,dotcom900825/xishi,lapo-luchini/pinry,lapo-luchini/pinry,Stackato-Apps/pinry,dotcom900825/xishi,wangjun/pinry,lapo-luchini/pinry,supervacuo/pinry,wangjun/pinry,pinry/pinry,MSylvia/pinry,pinry/pinry,rafirosenberg/pinry,pinry/pinry,QLGu/pinry,rafirosenberg/pinry,Stackato-Apps/pinry,supervacuo/pinry,wangjun/pinry,QLGu/pinry | from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args : lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
Fix a small styling error in the lazy_resolve definition | from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args: lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
| <commit_before>from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args : lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
<commit_msg>Fix a small styling error in the lazy_resolve definition<commit_after> | from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args: lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
| from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args : lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
Fix a small styling error in the lazy_resolve definitionfrom django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args: lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
| <commit_before>from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args : lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
<commit_msg>Fix a small styling error in the lazy_resolve definition<commit_after>from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.functional import lazy
from django.views.generic.base import TemplateView
from django.views.generic import CreateView
from .forms import PinForm
from .models import Pin
reverse_lazy = lambda name=None, *args: lazy(reverse, str)(name, args=args)
class RecentPins(TemplateView):
template_name = 'pins/recent_pins.html'
class NewPin(CreateView):
model = Pin
form_class = PinForm
success_url = reverse_lazy('pins:recent-pins')
def form_valid(self, form):
form.instance.submitter = self.request.user
messages.success(self.request, 'New pin successfully added.')
return super(NewPin, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request, 'Pin did not pass validation!')
return super(NewPin, self).form_invalid(form)
def delete_pin(request, pin_id):
try:
pin = Pin.objects.get(id=pin_id)
if pin.submitter == request.user:
pin.delete()
messages.success(request, 'Pin successfully deleted.')
else:
messages.error(request, 'You are not the submitter and can not '
'delete this pin.')
except Pin.DoesNotExist:
messages.error(request, 'Pin with the given id does not exist.')
return HttpResponseRedirect(reverse('pins:recent-pins'))
|
af56e9e9935b2caafe2b13eff3e2dd0d0eb5d980 | pysis/util/__init__.py | pysis/util/__init__.py | # -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from file_manipulation import write_file_list, file_variations, ImageName
| # -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from .file_manipulation import write_file_list, file_variations, ImageName
| Use relative import for file manipulation module. | Use relative import for file manipulation module.
| Python | bsd-3-clause | wtolson/pysis,wtolson/pysis,michaelaye/Pysis,michaelaye/Pysis | # -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from file_manipulation import write_file_list, file_variations, ImageName
Use relative import for file manipulation module. | # -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from .file_manipulation import write_file_list, file_variations, ImageName
| <commit_before># -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from file_manipulation import write_file_list, file_variations, ImageName
<commit_msg>Use relative import for file manipulation module.<commit_after> | # -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from .file_manipulation import write_file_list, file_variations, ImageName
| # -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from file_manipulation import write_file_list, file_variations, ImageName
Use relative import for file manipulation module.# -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from .file_manipulation import write_file_list, file_variations, ImageName
| <commit_before># -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from file_manipulation import write_file_list, file_variations, ImageName
<commit_msg>Use relative import for file manipulation module.<commit_after># -*- coding: utf-8 -*-
"""
Utilities to help with common Isis patterns.
"""
__all__ = [
'write_file_list',
'file_variations',
'ImageName',
]
from .file_manipulation import write_file_list, file_variations, ImageName
|
fc74e6a4bc9992647abbb9f92a7e5880e5c29506 | models.py | models.py | from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
| from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
last_modified = models.DateTimeField()
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
| Add 'last modified' field to Post model | Add 'last modified' field to Post model | Python | mit | SyntaxBlitz/bridie,SyntaxBlitz/bridie | from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
Add 'last modified' field to Post model | from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
last_modified = models.DateTimeField()
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
| <commit_before>from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
<commit_msg>Add 'last modified' field to Post model<commit_after> | from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
last_modified = models.DateTimeField()
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
| from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
Add 'last modified' field to Post modelfrom django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
last_modified = models.DateTimeField()
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
| <commit_before>from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
<commit_msg>Add 'last modified' field to Post model<commit_after>from django.db import models
# Create your models here.
class User(models.Model):
display_name = models.CharField(max_length=64)
auth_key = models.CharField(max_length=64)
class Post(models.Model):
user = models.ForeignKey(User)
text = models.CharField(max_length=4000)
last_modified = models.DateTimeField()
class Comment(models.Model):
user = models.ForeignKey(User)
post = models.ForeignKey(Post)
text = models.CharField(max_length=4000)
|
6989e6b2308cbe496857b5f911c136fcf3043444 | zeus/api/resources/user_token.py | zeus/api/resources/user_token.py | from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
| from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
| Fix user token endpoint authorization | fix(token): Fix user token endpoint authorization
| Python | apache-2.0 | getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus | from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
fix(token): Fix user token endpoint authorization | from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
| <commit_before>from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
<commit_msg>fix(token): Fix user token endpoint authorization<commit_after> | from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
| from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
fix(token): Fix user token endpoint authorizationfrom sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
| <commit_before>from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
<commit_msg>fix(token): Fix user token endpoint authorization<commit_after>from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
|
a8e2f3e00145f56429eb3d01aa08efe329191b18 | src/proposals/admin.py | src/proposals/admin.py | from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
| from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
    """Inline editor for extra speakers attached to a proposal.

    AdditionalSpeaker points at its proposal via a generic relation,
    hence the ct_field/ct_fk_field settings below.
    """
    model = AdditionalSpeaker
    fields = ['user', 'status', 'cancelled']
    # Names of the content-type and object-id fields of the generic FK.
    ct_field = 'proposal_type'
    ct_fk_field = 'proposal_id'
    extra = 0  # no blank extra rows by default
class ProposalAdmin(admin.ModelAdmin):
    """Base admin configuration shared by talk and tutorial proposals."""
    fields = [
        'conference', 'submitter', 'title', 'category', 'duration',
        'language', 'abstract', 'python_level', 'objective',
        'detailed_description', 'outline', 'supplementary',
        'recording_policy', 'slide_link', 'cancelled',
    ]
    # Render submitter as a raw-ID input instead of a <select> listing
    # every user account.
    raw_id_fields = ['submitter']
    search_fields = ['title', 'abstract']
    inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
    """Admin for talk proposals; ExportMixin adds an export action."""
    # Talks additionally track acceptance state.
    fields = ProposalAdmin.fields + ['accepted']
    list_display = [
        'title', 'category', 'duration', 'language',
        'python_level', 'accepted',
    ]
    list_filter = [
        'category', 'duration', 'language', 'python_level', 'accepted',
    ]
    resource_class = TalkProposalResource  # import-export column mapping
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
    """Admin for tutorial proposals (no acceptance field or export)."""
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
| Make submitter a raw_id_field to prevent long select tag | Make submitter a raw_id_field to prevent long select tag
| Python | mit | pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016 | from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
Make submitter a raw_id_field to prevent long select tag | from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
raw_id_fields = ['submitter']
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
| <commit_before>from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
<commit_msg>Make submitter a raw_id_field to prevent long select tag<commit_after> | from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
raw_id_fields = ['submitter']
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
| from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
Make submitter a raw_id_field to prevent long select tagfrom django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
raw_id_fields = ['submitter']
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
| <commit_before>from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
<commit_msg>Make submitter a raw_id_field to prevent long select tag<commit_after>from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
raw_id_fields = ['submitter']
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
|
43696b102bada7408c5c8151e4ae87e5a2855337 | ds_binary_tree_ft.py | ds_binary_tree_ft.py | def binary_tree(r):
pass
def main():
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def binary_tree(r):
    """Return a new binary tree rooted at *r*, as a list [value, left, right]."""
    node = [r, [], []]
    return node
def insert_left(root, new_branch):
    """Attach *new_branch* as the left child of *root*, in place.

    Any existing left subtree is pushed one level down, becoming the
    left child of the newly inserted node. Returns *root*.
    """
    displaced = root.pop(1)
    child = [new_branch, displaced, []] if len(displaced) > 1 else [new_branch, [], []]
    root.insert(1, child)
    return root
def insert_right(root, new_branch):
    """Attach *new_branch* as the right child of *root*, in place.

    Any existing right subtree is pushed one level down, becoming the
    right child of the newly inserted node. Returns *root*.
    """
    displaced = root.pop(2)
    child = [new_branch, [], displaced] if len(displaced) > 1 else [new_branch, [], []]
    root.insert(2, child)
    return root
def get_root_value(root):
    """Return the value stored at the root node of *root*."""
    value = root[0]
    return value
def set_root_value(root, new_val):
    """Replace the root node's value with *new_val* (in place; returns None)."""
    root[0] = new_val
def get_left_tree(root):
    """Return the left subtree of *root* (a shared reference, not a copy)."""
    left_subtree = root[1]
    return left_subtree
def get_right_tree(root):
    """Return the right subtree of *root* (a shared reference, not a copy)."""
    right_subtree = root[2]
    return right_subtree
def main():
    """Demo driver: exercise the list-of-lists binary tree helpers."""
    root = binary_tree(3)
    print('root: {}'.format(root))
    insert_left(root, 4)
    print('insert_left(root, 4): {}'.format(root))
    # Inserting 5 pushes the existing left child (4) one level down.
    insert_left(root, 5)
    print('insert_left(root, 5): {}'.format(root))
    insert_right(root, 6)
    print('insert_right(root, 6): {}'.format(root))
    insert_right(root, 7)
    print('insert_right(root, 7): {}'.format(root))
    left = get_left_tree(root)
    print('get_left_tree(root): {}'.format(left))
    # Mutating the returned subtree mutates root too (shared structure).
    set_root_value(left, 9)
    print('set_root_value(left, 9): {}'.format(left))
    print('root: {}'.format(root))
    insert_left(left, 11)
    print('insert_left(left, 11): {}'.format(left))
    print('root: {}'.format(root))
    print('Get right tree of right tree:')
    print(get_right_tree(get_right_tree(root)))
if __name__ == '__main__':
main()
| Complete ds: binary_tree using ls of ls | Complete ds: binary_tree using ls of ls
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | def binary_tree(r):
pass
def main():
pass
if __name__ == '__main__':
main()
Complete ds: binary_tree using ls of ls | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def binary_tree(r):
"""Binary tree using list of list."""
return [r, [], []]
def insert_left(root, new_branch):
left_tree = root.pop(1)
if len(left_tree) > 1:
root.insert(1, [new_branch, left_tree, []])
else:
root.insert(1, [new_branch, [], []])
return root
def insert_right(root, new_branch):
right_tree = root.pop(2)
if len(right_tree) > 1:
root.insert(2, [new_branch, [], right_tree])
else:
root.insert(2, [new_branch, [], []])
return root
def get_root_value(root):
return root[0]
def set_root_value(root, new_val):
root[0] = new_val
def get_left_tree(root):
return root[1]
def get_right_tree(root):
return root[2]
def main():
root = binary_tree(3)
print('root: {}'.format(root))
insert_left(root, 4)
print('insert_left(root, 4): {}'.format(root))
insert_left(root, 5)
print('insert_left(root, 5): {}'.format(root))
insert_right(root, 6)
print('insert_right(root, 6): {}'.format(root))
insert_right(root, 7)
print('insert_right(root, 7): {}'.format(root))
left = get_left_tree(root)
print('get_left_tree(root): {}'.format(left))
set_root_value(left, 9)
print('set_root_value(left, 9): {}'.format(left))
print('root: {}'.format(root))
insert_left(left, 11)
print('insert_left(left, 11): {}'.format(left))
print('root: {}'.format(root))
print('Get right tree of right tree:')
print(get_right_tree(get_right_tree(root)))
if __name__ == '__main__':
main()
| <commit_before>def binary_tree(r):
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete ds: binary_tree using ls of ls<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def binary_tree(r):
"""Binary tree using list of list."""
return [r, [], []]
def insert_left(root, new_branch):
left_tree = root.pop(1)
if len(left_tree) > 1:
root.insert(1, [new_branch, left_tree, []])
else:
root.insert(1, [new_branch, [], []])
return root
def insert_right(root, new_branch):
right_tree = root.pop(2)
if len(right_tree) > 1:
root.insert(2, [new_branch, [], right_tree])
else:
root.insert(2, [new_branch, [], []])
return root
def get_root_value(root):
return root[0]
def set_root_value(root, new_val):
root[0] = new_val
def get_left_tree(root):
return root[1]
def get_right_tree(root):
return root[2]
def main():
root = binary_tree(3)
print('root: {}'.format(root))
insert_left(root, 4)
print('insert_left(root, 4): {}'.format(root))
insert_left(root, 5)
print('insert_left(root, 5): {}'.format(root))
insert_right(root, 6)
print('insert_right(root, 6): {}'.format(root))
insert_right(root, 7)
print('insert_right(root, 7): {}'.format(root))
left = get_left_tree(root)
print('get_left_tree(root): {}'.format(left))
set_root_value(left, 9)
print('set_root_value(left, 9): {}'.format(left))
print('root: {}'.format(root))
insert_left(left, 11)
print('insert_left(left, 11): {}'.format(left))
print('root: {}'.format(root))
print('Get right tree of right tree:')
print(get_right_tree(get_right_tree(root)))
if __name__ == '__main__':
main()
| def binary_tree(r):
pass
def main():
pass
if __name__ == '__main__':
main()
Complete ds: binary_tree using ls of lsfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def binary_tree(r):
"""Binary tree using list of list."""
return [r, [], []]
def insert_left(root, new_branch):
left_tree = root.pop(1)
if len(left_tree) > 1:
root.insert(1, [new_branch, left_tree, []])
else:
root.insert(1, [new_branch, [], []])
return root
def insert_right(root, new_branch):
right_tree = root.pop(2)
if len(right_tree) > 1:
root.insert(2, [new_branch, [], right_tree])
else:
root.insert(2, [new_branch, [], []])
return root
def get_root_value(root):
return root[0]
def set_root_value(root, new_val):
root[0] = new_val
def get_left_tree(root):
return root[1]
def get_right_tree(root):
return root[2]
def main():
root = binary_tree(3)
print('root: {}'.format(root))
insert_left(root, 4)
print('insert_left(root, 4): {}'.format(root))
insert_left(root, 5)
print('insert_left(root, 5): {}'.format(root))
insert_right(root, 6)
print('insert_right(root, 6): {}'.format(root))
insert_right(root, 7)
print('insert_right(root, 7): {}'.format(root))
left = get_left_tree(root)
print('get_left_tree(root): {}'.format(left))
set_root_value(left, 9)
print('set_root_value(left, 9): {}'.format(left))
print('root: {}'.format(root))
insert_left(left, 11)
print('insert_left(left, 11): {}'.format(left))
print('root: {}'.format(root))
print('Get right tree of right tree:')
print(get_right_tree(get_right_tree(root)))
if __name__ == '__main__':
main()
| <commit_before>def binary_tree(r):
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete ds: binary_tree using ls of ls<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def binary_tree(r):
"""Binary tree using list of list."""
return [r, [], []]
def insert_left(root, new_branch):
left_tree = root.pop(1)
if len(left_tree) > 1:
root.insert(1, [new_branch, left_tree, []])
else:
root.insert(1, [new_branch, [], []])
return root
def insert_right(root, new_branch):
right_tree = root.pop(2)
if len(right_tree) > 1:
root.insert(2, [new_branch, [], right_tree])
else:
root.insert(2, [new_branch, [], []])
return root
def get_root_value(root):
return root[0]
def set_root_value(root, new_val):
root[0] = new_val
def get_left_tree(root):
return root[1]
def get_right_tree(root):
return root[2]
def main():
root = binary_tree(3)
print('root: {}'.format(root))
insert_left(root, 4)
print('insert_left(root, 4): {}'.format(root))
insert_left(root, 5)
print('insert_left(root, 5): {}'.format(root))
insert_right(root, 6)
print('insert_right(root, 6): {}'.format(root))
insert_right(root, 7)
print('insert_right(root, 7): {}'.format(root))
left = get_left_tree(root)
print('get_left_tree(root): {}'.format(left))
set_root_value(left, 9)
print('set_root_value(left, 9): {}'.format(left))
print('root: {}'.format(root))
insert_left(left, 11)
print('insert_left(left, 11): {}'.format(left))
print('root: {}'.format(root))
print('Get right tree of right tree:')
print(get_right_tree(get_right_tree(root)))
if __name__ == '__main__':
main()
|
6c78f66cda7842894ba11109bee602633edd87b5 | symposion/reviews/forms.py | symposion/reviews/forms.py | from django import forms
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": MarkEdit()}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
| from django import forms
from django.forms import Textarea
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
    """Vote plus Markdown comment form for a single Review."""
    class Meta:
        model = Review
        fields = ["vote", "comment"]
        widgets = {"comment": MarkEdit()}

    def __init__(self, *args, **kwargs):
        super(ReviewForm, self).__init__(*args, **kwargs)
        # Replace the default vote field so it renders as radio buttons
        # restricted to the canonical vote choices.
        self.fields["vote"] = forms.ChoiceField(
            widget=forms.RadioSelect(),
            choices=VOTES.CHOICES
        )
class ReviewCommentForm(forms.ModelForm):
    """Markdown-edited comment attached to a review."""
    class Meta:
        model = Comment
        fields = ["text"]
        widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
    """Message to the speaker.

    Uses a plain Textarea (not MarkEdit) — speaker feedback is
    deliberately not a Markdown field.
    """
    class Meta:
        model = ProposalMessage
        fields = ["message"]
        widgets = {"message": Textarea(attrs={'class': 'fullwidth-textarea'})}
class BulkPresentationForm(forms.Form):
    """Accepts a comma-separated list of talk IDs for bulk acceptance."""
    talk_ids = forms.CharField(
        max_length=500,
        # Fixed typo in the user-visible help text: "seperated" -> "separated".
        help_text="Provide a comma separated list of talk ids to accept."
    )
| Make review feedback not a Markdown widget | Make review feedback not a Markdown widget
Review feedback wasn't supposed to be in markdown. Change the
widget to a regular text area.
| Python | bsd-3-clause | pyconjp/pyconjp-website,Diwahars/pycon,PyCon/pycon,njl/pycon,njl/pycon,PyCon/pycon,Diwahars/pycon,osmfj/sotmjp-website,smellman/sotmjp-website,smellman/sotmjp-website,PyCon/pycon,pyconjp/pyconjp-website,osmfj/sotmjp-website,Diwahars/pycon,njl/pycon,pyconjp/pyconjp-website,osmfj/sotmjp-website,pyconjp/pyconjp-website,smellman/sotmjp-website,Diwahars/pycon,njl/pycon,osmfj/sotmjp-website,PyCon/pycon,smellman/sotmjp-website | from django import forms
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": MarkEdit()}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
Make review feedback not a Markdown widget
Review feedback wasn't supposed to be in markdown. Change the
widget to a regular text area. | from django import forms
from django.forms import Textarea
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": Textarea(attrs={'class': 'fullwidth-textarea'})}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
| <commit_before>from django import forms
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": MarkEdit()}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
<commit_msg>Make review feedback not a Markdown widget
Review feedback wasn't supposed to be in markdown. Change the
widget to a regular text area.<commit_after> | from django import forms
from django.forms import Textarea
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": Textarea(attrs={'class': 'fullwidth-textarea'})}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
| from django import forms
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": MarkEdit()}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
Make review feedback not a Markdown widget
Review feedback wasn't supposed to be in markdown. Change the
widget to a regular text area.from django import forms
from django.forms import Textarea
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": Textarea(attrs={'class': 'fullwidth-textarea'})}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
| <commit_before>from django import forms
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": MarkEdit()}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
<commit_msg>Make review feedback not a Markdown widget
Review feedback wasn't supposed to be in markdown. Change the
widget to a regular text area.<commit_after>from django import forms
from django.forms import Textarea
from markedit.widgets import MarkEdit
from symposion.reviews.models import Review, Comment, ProposalMessage, VOTES
class ReviewForm(forms.ModelForm):
class Meta:
model = Review
fields = ["vote", "comment"]
widgets = {"comment": MarkEdit()}
def __init__(self, *args, **kwargs):
super(ReviewForm, self).__init__(*args, **kwargs)
self.fields["vote"] = forms.ChoiceField(
widget=forms.RadioSelect(),
choices=VOTES.CHOICES
)
class ReviewCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ["text"]
widgets = {"text": MarkEdit()}
class SpeakerCommentForm(forms.ModelForm):
class Meta:
model = ProposalMessage
fields = ["message"]
widgets = {"message": Textarea(attrs={'class': 'fullwidth-textarea'})}
class BulkPresentationForm(forms.Form):
talk_ids = forms.CharField(
max_length=500,
help_text="Provide a comma seperated list of talk ids to accept."
)
|
3d0d917aed8aa73f8f73601666141b9acf72120e | server.py | server.py | import StringIO
import base64
import signal
from flask import Flask, render_template, request, make_response
from quiver.plotter import FieldPlotter
app = Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True) | import StringIO
import base64
import signal
import flask
from quiver.plotter import FieldPlotter
app = flask.Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return flask.render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = flask.request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = flask.make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return flask.make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = flask.request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True) | Change import level of flask. | Change import level of flask.
| Python | mit | davidsoncasey/quiver,davidsoncasey/quiver,davidsoncasey/quiver,davidsoncasey/quiver,davidsoncasey/quiver | import StringIO
import base64
import signal
from flask import Flask, render_template, request, make_response
from quiver.plotter import FieldPlotter
app = Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True)Change import level of flask. | import StringIO
import base64
import signal
import flask
from quiver.plotter import FieldPlotter
app = flask.Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return flask.render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = flask.request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = flask.make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return flask.make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = flask.request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True) | <commit_before>import StringIO
import base64
import signal
from flask import Flask, render_template, request, make_response
from quiver.plotter import FieldPlotter
app = Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True)<commit_msg>Change import level of flask.<commit_after> | import StringIO
import base64
import signal
import flask
from quiver.plotter import FieldPlotter
app = flask.Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return flask.render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = flask.request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = flask.make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return flask.make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = flask.request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True) | import StringIO
import base64
import signal
from flask import Flask, render_template, request, make_response
from quiver.plotter import FieldPlotter
app = Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True)Change import level of flask.import StringIO
import base64
import signal
import flask
from quiver.plotter import FieldPlotter
app = flask.Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return flask.render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = flask.request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = flask.make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return flask.make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = flask.request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True) | <commit_before>import StringIO
import base64
import signal
from flask import Flask, render_template, request, make_response
from quiver.plotter import FieldPlotter
app = Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True)<commit_msg>Change import level of flask.<commit_after>import StringIO
import base64
import signal
import flask
from quiver.plotter import FieldPlotter
app = flask.Flask(__name__)
@app.route('/')
def quiver():
'''Route for homepage'''
return flask.render_template('quiver.html')
@app.route('/plot/', methods=['GET',])
def plot():
equation_string = flask.request.args.get('equation')
diff_equation = FieldPlotter()
diff_equation.set_equation_from_string(equation_string)
diff_equation.make_plot()
# If plotting was successful, write plot out
if diff_equation.figure:
# Write output to memory and add to response object
output = StringIO.StringIO()
response = flask.make_response(base64.b64encode(diff_equation.write_data(output)))
response.mimetype = 'image/png'
return response
else:
return flask.make_response('')
@app.route('/data/', methods=['GET',])
def data():
equation_string = flask.request.args.get('equation')
plotter = FieldPlotter()
plotter.set_equation_from_string(equation_string)
plotter.make_data()
if __name__ == '__main__':
app.run(debug=True) |
742f4a4e97caa32483d65244b7e7a705763c67bf | TestRig/TestProgram/DetectPlatform.py | TestRig/TestProgram/DetectPlatform.py | import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
SERIAL_DEVICE_PATH = "/dev/cu.usb*"
else:
# TODO: linux?
SERIAL_DEVICE_PATH = "/dev/ttyACM*"
return glob.glob(SERIAL_DEVICE_PATH)
| import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
ports = glob.glob("/dev/cu.usb*")
else:
# TODO: linux?
ports = glob.glob("/dev/ttyACM*")
ports.extend(glob.glob("/dev/ttyUSB*"))
return ports
| Add support for FTDI devices under linux | Add support for FTDI devices under linux
| Python | apache-2.0 | Blinkinlabs/BlinkyTape,Blinkinlabs/BlinkyTape,Blinkinlabs/BlinkyTape | import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
SERIAL_DEVICE_PATH = "/dev/cu.usb*"
else:
# TODO: linux?
SERIAL_DEVICE_PATH = "/dev/ttyACM*"
return glob.glob(SERIAL_DEVICE_PATH)
Add support for FTDI devices under linux | import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
ports = glob.glob("/dev/cu.usb*")
else:
# TODO: linux?
ports = glob.glob("/dev/ttyACM*")
ports.extend(glob.glob("/dev/ttyUSB*"))
return ports
| <commit_before>import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
SERIAL_DEVICE_PATH = "/dev/cu.usb*"
else:
# TODO: linux?
SERIAL_DEVICE_PATH = "/dev/ttyACM*"
return glob.glob(SERIAL_DEVICE_PATH)
<commit_msg>Add support for FTDI devices under linux<commit_after> | import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
ports = glob.glob("/dev/cu.usb*")
else:
# TODO: linux?
ports = glob.glob("/dev/ttyACM*")
ports.extend(glob.glob("/dev/ttyUSB*"))
return ports
| import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
SERIAL_DEVICE_PATH = "/dev/cu.usb*"
else:
# TODO: linux?
SERIAL_DEVICE_PATH = "/dev/ttyACM*"
return glob.glob(SERIAL_DEVICE_PATH)
Add support for FTDI devices under linuximport subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
ports = glob.glob("/dev/cu.usb*")
else:
# TODO: linux?
ports = glob.glob("/dev/ttyACM*")
ports.extend(glob.glob("/dev/ttyUSB*"))
return ports
| <commit_before>import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
SERIAL_DEVICE_PATH = "/dev/cu.usb*"
else:
# TODO: linux?
SERIAL_DEVICE_PATH = "/dev/ttyACM*"
return glob.glob(SERIAL_DEVICE_PATH)
<commit_msg>Add support for FTDI devices under linux<commit_after>import subprocess
import time
import glob
def detectPlatform():
data = []
proc = subprocess.Popen(["uname"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
while True:
read = proc.stdout.readline() #block / wait
if not read:
break
data.append(read)
if data[0] == 'Darwin\n':
return "Darwin"
return "Unknown"
def ListSerialPorts():
# Scan for all connected devices; platform dependent
platform = detectPlatform()
if platform == 'Darwin':
ports = glob.glob("/dev/cu.usb*")
else:
# TODO: linux?
ports = glob.glob("/dev/ttyACM*")
ports.extend(glob.glob("/dev/ttyUSB*"))
return ports
|
82eb7a69ccb88d27141aeb483e4482041108723f | app/Display/display.py | app/Display/display.py | import sys
ESC = chr(27)
CLEAR = ESC + "[2J"
MOVE_HOME = ESC + "[H"
ERASE = CLEAR + MOVE_HOME
LINES = 24
COLS = 80
class Display:
def __init__(self, title):
self.title = title
def clear(self):
sys.stdout.write(ERASE)
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title)
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
def print(self, message):
print(message, end="\x0a\x0d")
| import sys
ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"
LINES = 24
COLS = 80
class Display:
def __init__(self, title, info=None):
self.title = title
self.info = info
def clear(self):
sys.stdout.write(ERASE)
sys.stdout.flush()
def move_to(self, row, col):
sys.stdout.write(MOVE_TO.format(row, col))
sys.stdout.flush()
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title.center(COLS))
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
if self.info is not None:
self.move_to(LINES, 0)
sys.stdout.write(self.info)
sys.stdout.flush()
self.move_to(LINES, 0)
def print(self, message):
print(message, end="\x0a\x0d")
| Add support for cursor position, centered title, and an info bar | Add support for cursor position, centered title, and an info bar
| Python | mit | gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x | import sys
ESC = chr(27)
CLEAR = ESC + "[2J"
MOVE_HOME = ESC + "[H"
ERASE = CLEAR + MOVE_HOME
LINES = 24
COLS = 80
class Display:
def __init__(self, title):
self.title = title
def clear(self):
sys.stdout.write(ERASE)
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title)
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
def print(self, message):
print(message, end="\x0a\x0d")
Add support for cursor position, centered title, and an info bar | import sys
ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"
LINES = 24
COLS = 80
class Display:
def __init__(self, title, info=None):
self.title = title
self.info = info
def clear(self):
sys.stdout.write(ERASE)
sys.stdout.flush()
def move_to(self, row, col):
sys.stdout.write(MOVE_TO.format(row, col))
sys.stdout.flush()
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title.center(COLS))
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
if self.info is not None:
self.move_to(LINES, 0)
sys.stdout.write(self.info)
sys.stdout.flush()
self.move_to(LINES, 0)
def print(self, message):
print(message, end="\x0a\x0d")
| <commit_before>import sys
ESC = chr(27)
CLEAR = ESC + "[2J"
MOVE_HOME = ESC + "[H"
ERASE = CLEAR + MOVE_HOME
LINES = 24
COLS = 80
class Display:
def __init__(self, title):
self.title = title
def clear(self):
sys.stdout.write(ERASE)
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title)
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
def print(self, message):
print(message, end="\x0a\x0d")
<commit_msg>Add support for cursor position, centered title, and an info bar<commit_after> | import sys
ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"
LINES = 24
COLS = 80
class Display:
def __init__(self, title, info=None):
self.title = title
self.info = info
def clear(self):
sys.stdout.write(ERASE)
sys.stdout.flush()
def move_to(self, row, col):
sys.stdout.write(MOVE_TO.format(row, col))
sys.stdout.flush()
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title.center(COLS))
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
if self.info is not None:
self.move_to(LINES, 0)
sys.stdout.write(self.info)
sys.stdout.flush()
self.move_to(LINES, 0)
def print(self, message):
print(message, end="\x0a\x0d")
| import sys
ESC = chr(27)
CLEAR = ESC + "[2J"
MOVE_HOME = ESC + "[H"
ERASE = CLEAR + MOVE_HOME
LINES = 24
COLS = 80
class Display:
def __init__(self, title):
self.title = title
def clear(self):
sys.stdout.write(ERASE)
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title)
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
def print(self, message):
print(message, end="\x0a\x0d")
Add support for cursor position, centered title, and an info barimport sys
ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"
LINES = 24
COLS = 80
class Display:
def __init__(self, title, info=None):
self.title = title
self.info = info
def clear(self):
sys.stdout.write(ERASE)
sys.stdout.flush()
def move_to(self, row, col):
sys.stdout.write(MOVE_TO.format(row, col))
sys.stdout.flush()
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title.center(COLS))
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
if self.info is not None:
self.move_to(LINES, 0)
sys.stdout.write(self.info)
sys.stdout.flush()
self.move_to(LINES, 0)
def print(self, message):
print(message, end="\x0a\x0d")
| <commit_before>import sys
ESC = chr(27)
CLEAR = ESC + "[2J"
MOVE_HOME = ESC + "[H"
ERASE = CLEAR + MOVE_HOME
LINES = 24
COLS = 80
class Display:
def __init__(self, title):
self.title = title
def clear(self):
sys.stdout.write(ERASE)
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title)
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
def print(self, message):
print(message, end="\x0a\x0d")
<commit_msg>Add support for cursor position, centered title, and an info bar<commit_after>import sys
ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"
LINES = 24
COLS = 80
class Display:
def __init__(self, title, info=None):
self.title = title
self.info = info
def clear(self):
sys.stdout.write(ERASE)
sys.stdout.flush()
def move_to(self, row, col):
sys.stdout.write(MOVE_TO.format(row, col))
sys.stdout.flush()
def show_properties(self, properties, names=None):
if names is None:
names = properties.keys()
max_len = max(map(len, names))
self.clear()
self.print(self.title.center(COLS))
print()
for k in names:
self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))
if self.info is not None:
self.move_to(LINES, 0)
sys.stdout.write(self.info)
sys.stdout.flush()
self.move_to(LINES, 0)
def print(self, message):
print(message, end="\x0a\x0d")
|
fe4f2697afdc280e0158aad1acf9613d1decb32d | project_template.py | project_template.py | import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
folder = folders[0]
print(folder)
| import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
| Implement checking the format of settings | Implement checking the format of settings
| Python | mit | autopp/SublimeProjectTemplate,autopp/SublimeProjectTemplate | import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
folder = folders[0]
print(folder)
Implement checking the format of settings | import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
| <commit_before>import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
folder = folders[0]
print(folder)
<commit_msg>Implement checking the format of settings<commit_after> | import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
| import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
folder = folders[0]
print(folder)
Implement checking the format of settingsimport sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
| <commit_before>import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
folder = folders[0]
print(folder)
<commit_msg>Implement checking the format of settings<commit_after>import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
|
a08bd02fcd255d19991398444a5d1ec0d11409d2 | test/test_featurecounts.py | test/test_featurecounts.py | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
| import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
assert (
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
)
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus, tolerance=0.1)
assert False
except IOError:
assert True
| Fix syntax in featureCounts test | Fix syntax in featureCounts test
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
Fix syntax in featureCounts test | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
assert (
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
)
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus, tolerance=0.1)
assert False
except IOError:
assert True
| <commit_before>import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
<commit_msg>Fix syntax in featureCounts test<commit_after> | import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
assert (
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
)
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus, tolerance=0.1)
assert False
except IOError:
assert True
| import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
Fix syntax in featureCounts testimport sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
assert (
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
)
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus, tolerance=0.1)
assert False
except IOError:
assert True
| <commit_before>import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
try:
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus,
tolerance=0.1)
assert False
except IOError:
assert True
<commit_msg>Fix syntax in featureCounts test<commit_after>import sequana.featurecounts as fc
from sequana import sequana_data
def test_featurecounts():
RNASEQ_DIR_0 = sequana_data("featurecounts") + "/rnaseq_0"
RNASEQ_DIR_1 = sequana_data("featurecounts") + "/rnaseq_1"
RNASEQ_DIR_2 = sequana_data("featurecounts") + "/rnaseq_2"
RNASEQ_DIR_undef = sequana_data("featurecounts") + "/rnaseq_undef"
RNASEQ_DIR_noconsensus = sequana_data("featurecounts") + "/rnaseq_noconsensus"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_0, tolerance=0.1) == "0"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_1, tolerance=0.1) == "1"
assert fc.get_most_probable_strand_consensus(RNASEQ_DIR_2, tolerance=0.1) == "2"
assert (
fc.get_most_probable_strand_consensus(RNASEQ_DIR_undef, tolerance=0.1) == "NA"
)
try:
fc.get_most_probable_strand_consensus(RNASEQ_DIR_noconsensus, tolerance=0.1)
assert False
except IOError:
assert True
|
c38b9f378f7dbf968ee6bcb7b3f0625a1993d61d | tests/core/test_history.py | tests/core/test_history.py | from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertTrue(track.name in track_ref.name)
if track.artists:
for artist in track.artists:
self.assertTrue(artist.name in track_ref.name)
| from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertIn(track.name, track_ref.name)
if track.artists:
for artist in track.artists:
self.assertIn(artist.name, track_ref.name)
| Use assertIn instead of assertTrue to test membership. | Use assertIn instead of assertTrue to test membership.
| Python | apache-2.0 | swak/mopidy,ali/mopidy,mokieyue/mopidy,tkem/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,tkem/mopidy,bacontext/mopidy,adamcik/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,mopidy/mopidy,bencevans/mopidy,bacontext/mopidy,jcass77/mopidy,jodal/mopidy,diandiankan/mopidy,vrs01/mopidy,bencevans/mopidy,priestd09/mopidy,hkariti/mopidy,ZenithDK/mopidy,adamcik/mopidy,glogiotatidis/mopidy,jcass77/mopidy,jmarsik/mopidy,pacificIT/mopidy,adamcik/mopidy,rawdlite/mopidy,jodal/mopidy,vrs01/mopidy,mokieyue/mopidy,quartz55/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,mopidy/mopidy,mopidy/mopidy,bacontext/mopidy,ali/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,swak/mopidy,bencevans/mopidy,tkem/mopidy,priestd09/mopidy,pacificIT/mopidy,swak/mopidy,vrs01/mopidy,rawdlite/mopidy,diandiankan/mopidy,hkariti/mopidy,dbrgn/mopidy,diandiankan/mopidy,quartz55/mopidy,hkariti/mopidy,swak/mopidy,bencevans/mopidy,jmarsik/mopidy,priestd09/mopidy,woutervanwijk/mopidy,kingosticks/mopidy,quartz55/mopidy,ali/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,quartz55/mopidy,dbrgn/mopidy,woutervanwijk/mopidy,jmarsik/mopidy,rawdlite/mopidy,ZenithDK/mopidy,pacificIT/mopidy,diandiankan/mopidy,hkariti/mopidy,jcass77/mopidy,tkem/mopidy,jodal/mopidy,pacificIT/mopidy,mokieyue/mopidy,rawdlite/mopidy,jmarsik/mopidy,vrs01/mopidy,mokieyue/mopidy,dbrgn/mopidy,ali/mopidy,bacontext/mopidy | from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertTrue(track.name in track_ref.name)
if track.artists:
for artist in track.artists:
self.assertTrue(artist.name in track_ref.name)
Use assertIn instead of assertTrue to test membership. | from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertIn(track.name, track_ref.name)
if track.artists:
for artist in track.artists:
self.assertIn(artist.name, track_ref.name)
| <commit_before>from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertTrue(track.name in track_ref.name)
if track.artists:
for artist in track.artists:
self.assertTrue(artist.name in track_ref.name)
<commit_msg>Use assertIn instead of assertTrue to test membership.<commit_after> | from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertIn(track.name, track_ref.name)
if track.artists:
for artist in track.artists:
self.assertIn(artist.name, track_ref.name)
| from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertTrue(track.name in track_ref.name)
if track.artists:
for artist in track.artists:
self.assertTrue(artist.name in track_ref.name)
Use assertIn instead of assertTrue to test membership.from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertIn(track.name, track_ref.name)
if track.artists:
for artist in track.artists:
self.assertIn(artist.name, track_ref.name)
| <commit_before>from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertTrue(track.name in track_ref.name)
if track.artists:
for artist in track.artists:
self.assertTrue(artist.name in track_ref.name)
<commit_msg>Use assertIn instead of assertTrue to test membership.<commit_after>from __future__ import unicode_literals
import unittest
from mopidy.core import History
from mopidy.models import Artist, Track
class PlaybackHistoryTest(unittest.TestCase):
def setUp(self):
self.tracks = [
Track(uri='dummy1:a', name='foo',
artists=[Artist(name='foober'), Artist(name='barber')]),
Track(uri='dummy2:a', name='foo'),
Track(uri='dummy3:a', name='bar')
]
self.history = History()
def test_add_track(self):
self.history.add(self.tracks[0])
self.history.add(self.tracks[1])
self.history.add(self.tracks[2])
self.assertEqual(self.history.size, 3)
def test_unsuitable_add(self):
size = self.history.size
self.history.add(self.tracks[0])
self.history.add(object())
self.history.add(self.tracks[1])
self.assertEqual(self.history.size, size + 2)
def test_history_sanity(self):
track = self.tracks[0]
self.history.add(track)
stored_history = self.history.get_history()
track_ref = stored_history[0][1]
self.assertEqual(track_ref.uri, track.uri)
self.assertIn(track.name, track_ref.name)
if track.artists:
for artist in track.artists:
self.assertIn(artist.name, track_ref.name)
|
92da4abbcf1551d87192b627b3c5f44f2fe82e91 | quickplots/textsize.py | quickplots/textsize.py | """Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return 10
| """Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return int(height)
| Make very basic font size calculator | Make very basic font size calculator
| Python | mit | samirelanduk/quickplots | """Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return 10
Make very basic font size calculator | """Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return int(height)
| <commit_before>"""Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return 10
<commit_msg>Make very basic font size calculator<commit_after> | """Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return int(height)
| """Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return 10
Make very basic font size calculator"""Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return int(height)
| <commit_before>"""Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return 10
<commit_msg>Make very basic font size calculator<commit_after>"""Functions for working out what font_size text needs to be"""
def get_font_size(s, width, height):
return int(height)
|
e3287b9669e07dc4265efe768ba2b4c3351839d3 | base_geoengine/geo_db.py | base_geoengine/geo_db.py | # Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
"Error, can not automatically initialize spatial postgis support. "
"Database user may have to be superuser and postgres/postgis "
"extentions with their devel header have to be installed. "
"If you do not want Odoo to connect with a super user "
"you can manually prepare your database. To do this"
"open a client to your database using a super user and run: \n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
| # Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo import _
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
_(
"Error, can not automatically initialize spatial postgis"
" support. Database user may have to be superuser and"
" postgres/postgis extensions with their devel header have"
" to be installed. If you do not want Odoo to connect with a"
" super user you can manually prepare your database. To do"
" this, open a client to your database using a super user and"
" run:\n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
)
| Make MissingError translatable for auto install of postgis extension, plus fix some typos | Make MissingError translatable for auto install of postgis extension, plus fix some typos
| Python | agpl-3.0 | OCA/geospatial,OCA/geospatial,OCA/geospatial | # Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
"Error, can not automatically initialize spatial postgis support. "
"Database user may have to be superuser and postgres/postgis "
"extentions with their devel header have to be installed. "
"If you do not want Odoo to connect with a super user "
"you can manually prepare your database. To do this"
"open a client to your database using a super user and run: \n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
Make MissingError translatable for auto install of postgis extension, plus fix some typos | # Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo import _
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
_(
"Error, can not automatically initialize spatial postgis"
" support. Database user may have to be superuser and"
" postgres/postgis extensions with their devel header have"
" to be installed. If you do not want Odoo to connect with a"
" super user you can manually prepare your database. To do"
" this, open a client to your database using a super user and"
" run:\n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
)
| <commit_before># Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
"Error, can not automatically initialize spatial postgis support. "
"Database user may have to be superuser and postgres/postgis "
"extentions with their devel header have to be installed. "
"If you do not want Odoo to connect with a super user "
"you can manually prepare your database. To do this"
"open a client to your database using a super user and run: \n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
<commit_msg>Make MissingError translatable for auto install of postgis extension, plus fix some typos<commit_after> | # Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo import _
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
_(
"Error, can not automatically initialize spatial postgis"
" support. Database user may have to be superuser and"
" postgres/postgis extensions with their devel header have"
" to be installed. If you do not want Odoo to connect with a"
" super user you can manually prepare your database. To do"
" this, open a client to your database using a super user and"
" run:\n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
)
| # Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
"Error, can not automatically initialize spatial postgis support. "
"Database user may have to be superuser and postgres/postgis "
"extentions with their devel header have to be installed. "
"If you do not want Odoo to connect with a super user "
"you can manually prepare your database. To do this"
"open a client to your database using a super user and run: \n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
Make MissingError translatable for auto install of postgis extension, plus fix some typos# Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo import _
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
_(
"Error, can not automatically initialize spatial postgis"
" support. Database user may have to be superuser and"
" postgres/postgis extensions with their devel header have"
" to be installed. If you do not want Odoo to connect with a"
" super user you can manually prepare your database. To do"
" this, open a client to your database using a super user and"
" run:\n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
)
| <commit_before># Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
"Error, can not automatically initialize spatial postgis support. "
"Database user may have to be superuser and postgres/postgis "
"extentions with their devel header have to be installed. "
"If you do not want Odoo to connect with a super user "
"you can manually prepare your database. To do this"
"open a client to your database using a super user and run: \n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
<commit_msg>Make MissingError translatable for auto install of postgis extension, plus fix some typos<commit_after># Copyright 2011-2012 Nicolas Bessi (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
"""Helper to setup Postgis"""
import logging
from odoo import _
from odoo.exceptions import MissingError
logger = logging.getLogger('geoengine.sql')
def init_postgis(cr):
""" Initialize postgis
Add PostGIS support to the database. PostGIS is a spatial database
extender for PostgreSQL object-relational database. It adds support for
geographic objects allowing location queries to be run in SQL.
"""
cr.execute("""
SELECT
tablename
FROM
pg_tables
WHERE
tablename='spatial_ref_sys';
""")
check = cr.fetchone()
if check:
return {}
try:
cr.execute("""
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
""")
except Exception:
raise MissingError(
_(
"Error, can not automatically initialize spatial postgis"
" support. Database user may have to be superuser and"
" postgres/postgis extensions with their devel header have"
" to be installed. If you do not want Odoo to connect with a"
" super user you can manually prepare your database. To do"
" this, open a client to your database using a super user and"
" run:\n"
"CREATE EXTENSION postgis;\n"
"CREATE EXTENSION postgis_topology;\n"
)
)
|
aa436864f53a4c77b4869baabfb1478d7fea36f0 | tests/products/__init__.py | tests/products/__init__.py | """ Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
| """ Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
| Allow str type comparison in py2/3 | Allow str type comparison in py2/3
| Python | bsd-3-clause | ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tiles,ceholden/tilezilla,ceholden/landsat_tile | """ Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
Allow str type comparison in py2/3 | """ Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
| <commit_before>""" Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
<commit_msg>Allow str type comparison in py2/3<commit_after> | """ Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
| """ Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
Allow str type comparison in py2/3""" Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
| <commit_before>""" Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': str,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': str,
'instrument': str,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, type):
assert isinstance(value, _type)
else:
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
<commit_msg>Allow str type comparison in py2/3<commit_after>""" Test utilities for ensuring the correctness of products
"""
import arrow
import six
from tilezilla.core import BoundingBox, Band
MAPPING = {
'timeseries_id': six.string_types,
'acquired': arrow.Arrow,
'processed': arrow.Arrow,
'platform': six.string_types,
'instrument': six.string_types,
'bounds': BoundingBox,
'bands': [Band],
'metadata': dict,
'metadata_files': dict
}
def check_attributes(product):
for attr, _type in six.iteritems(MAPPING):
assert hasattr(product, attr)
value = getattr(product, attr)
if isinstance(_type, (type, tuple)):
# Type declaration one or more types
assert isinstance(value, _type)
else:
# Type declaration list of types
assert isinstance(value, type(_type))
for item in value:
assert isinstance(item, tuple(_type))
|
f2bf7807754d13c92bd2901072dd804dda61805f | cla_public/apps/contact/constants.py | cla_public/apps/contact/constants.py | # -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in the next week on'))
)
| # -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in on'))
)
| Update button label (call back time picker) | FE: Update button label (call back time picker)
| Python | mit | ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public | # -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in the next week on'))
)
FE: Update button label (call back time picker) | # -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in on'))
)
| <commit_before># -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in the next week on'))
)
<commit_msg>FE: Update button label (call back time picker)<commit_after> | # -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in on'))
)
| # -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in the next week on'))
)
FE: Update button label (call back time picker)# -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in on'))
)
| <commit_before># -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in the next week on'))
)
<commit_msg>FE: Update button label (call back time picker)<commit_after># -*- coding: utf-8 -*-
"Contact constants"
from flask.ext.babel import lazy_gettext as _
DAY_TODAY = 'today'
DAY_SPECIFIC = 'specific_day'
DAY_CHOICES = (
(DAY_TODAY, _('Call me today at')),
(DAY_SPECIFIC, _('Call me in on'))
)
|
74d6b8bf119eca7e3f8f1d49f3c8d82b726f3062 | faq/search_indexes.py | faq/search_indexes.py | # -*- coding: utf-8 -*-
from haystack import indexes
from haystack.sites import site
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase):
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase):
def get_queryset(self):
return Question.objects.published()
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
| # -*- coding: utf-8 -*-
"""
Haystack SearchIndexes for FAQ objects.
Note that these are compatible with both Haystack 1.0 and Haystack 2.0-beta.
The super class for these indexes can be customized by using the
``FAQ_SEARCH_INDEX`` setting.
"""
from haystack import indexes
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
# Haystack 2.0 (commit 070d46d72f92) requires that concrete SearchIndex classes
# use the indexes.Indexable mixin. Here we workaround that so our SearchIndex
# classes also work for Haystack 1.X.
try:
mixin = indexes.Indexable
except AttributeError:
mixin = object
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Topic
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Question
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Question.objects.published()
# try/except in order to register search indexes with site for Haystack 1.X
# without throwing exceptions with Haystack 2.0.
try:
from haystack.sites import site
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
except ImportError:
pass
| Add compatibility for Haystack 2.0. | Add compatibility for Haystack 2.0.
| Python | bsd-3-clause | benspaulding/django-faq | # -*- coding: utf-8 -*-
from haystack import indexes
from haystack.sites import site
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase):
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase):
def get_queryset(self):
return Question.objects.published()
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
Add compatibility for Haystack 2.0. | # -*- coding: utf-8 -*-
"""
Haystack SearchIndexes for FAQ objects.
Note that these are compatible with both Haystack 1.0 and Haystack 2.0-beta.
The super class for these indexes can be customized by using the
``FAQ_SEARCH_INDEX`` setting.
"""
from haystack import indexes
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
# Haystack 2.0 (commit 070d46d72f92) requires that concrete SearchIndex classes
# use the indexes.Indexable mixin. Here we workaround that so our SearchIndex
# classes also work for Haystack 1.X.
try:
mixin = indexes.Indexable
except AttributeError:
mixin = object
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Topic
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Question
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Question.objects.published()
# try/except in order to register search indexes with site for Haystack 1.X
# without throwing exceptions with Haystack 2.0.
try:
from haystack.sites import site
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
except ImportError:
pass
| <commit_before># -*- coding: utf-8 -*-
from haystack import indexes
from haystack.sites import site
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase):
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase):
def get_queryset(self):
return Question.objects.published()
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
<commit_msg>Add compatibility for Haystack 2.0.<commit_after> | # -*- coding: utf-8 -*-
"""
Haystack SearchIndexes for FAQ objects.
Note that these are compatible with both Haystack 1.0 and Haystack 2.0-beta.
The super class for these indexes can be customized by using the
``FAQ_SEARCH_INDEX`` setting.
"""
from haystack import indexes
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
# Haystack 2.0 (commit 070d46d72f92) requires that concrete SearchIndex classes
# use the indexes.Indexable mixin. Here we workaround that so our SearchIndex
# classes also work for Haystack 1.X.
try:
mixin = indexes.Indexable
except AttributeError:
mixin = object
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Topic
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Question
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Question.objects.published()
# try/except in order to register search indexes with site for Haystack 1.X
# without throwing exceptions with Haystack 2.0.
try:
from haystack.sites import site
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
except ImportError:
pass
| # -*- coding: utf-8 -*-
from haystack import indexes
from haystack.sites import site
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase):
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase):
def get_queryset(self):
return Question.objects.published()
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
Add compatibility for Haystack 2.0.# -*- coding: utf-8 -*-
"""
Haystack SearchIndexes for FAQ objects.
Note that these are compatible with both Haystack 1.0 and Haystack 2.0-beta.
The super class for these indexes can be customized by using the
``FAQ_SEARCH_INDEX`` setting.
"""
from haystack import indexes
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
# Haystack 2.0 (commit 070d46d72f92) requires that concrete SearchIndex classes
# use the indexes.Indexable mixin. Here we workaround that so our SearchIndex
# classes also work for Haystack 1.X.
try:
mixin = indexes.Indexable
except AttributeError:
mixin = object
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Topic
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Question
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Question.objects.published()
# try/except in order to register search indexes with site for Haystack 1.X
# without throwing exceptions with Haystack 2.0.
try:
from haystack.sites import site
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
except ImportError:
pass
| <commit_before># -*- coding: utf-8 -*-
from haystack import indexes
from haystack.sites import site
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase):
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase):
def get_queryset(self):
return Question.objects.published()
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
<commit_msg>Add compatibility for Haystack 2.0.<commit_after># -*- coding: utf-8 -*-
"""
Haystack SearchIndexes for FAQ objects.
Note that these are compatible with both Haystack 1.0 and Haystack 2.0-beta.
The super class for these indexes can be customized by using the
``FAQ_SEARCH_INDEX`` setting.
"""
from haystack import indexes
from faq.settings import SEARCH_INDEX
from faq.models import Topic, Question
# Haystack 2.0 (commit 070d46d72f92) requires that concrete SearchIndex classes
# use the indexes.Indexable mixin. Here we workaround that so our SearchIndex
# classes also work for Haystack 1.X.
try:
mixin = indexes.Indexable
except AttributeError:
mixin = object
class FAQIndexBase(SEARCH_INDEX):
text = indexes.CharField(document=True, use_template=True)
url = indexes.CharField(model_attr='get_absolute_url', indexed=False)
class TopicIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Topic
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Topic.objects.published()
class QuestionIndex(FAQIndexBase, mixin):
# Required method for Haystack 2.0, but harmless on 1.X.
def get_model(self):
return Question
# ``get_queryset`` is deprecated in Haystack v2, and ``index_queryset``
# ought to be used instead. But we must use the former to support
# Haystack < 1.2.4. Support for such older version is likely to be dropped
# in the near future.
def get_queryset(self):
return Question.objects.published()
# try/except in order to register search indexes with site for Haystack 1.X
# without throwing exceptions with Haystack 2.0.
try:
from haystack.sites import site
site.register(Topic, TopicIndex)
site.register(Question, QuestionIndex)
except ImportError:
pass
|
f1cf2af003f7d8686a8b54ca822ff9aaa7fcc96b | clifford/_version.py | clifford/_version.py | # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
| # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev1'
| Create a pre-release version for PyPI, to test tweaks to readme and setup.py. | Create a pre-release version for PyPI, to test tweaks to readme and setup.py.
| Python | bsd-3-clause | arsenovic/clifford,arsenovic/clifford | # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
Create a pre-release version for PyPI, to test tweaks to readme and setup.py. | # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev1'
| <commit_before># Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
<commit_msg>Create a pre-release version for PyPI, to test tweaks to readme and setup.py.<commit_after> | # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev1'
| # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
Create a pre-release version for PyPI, to test tweaks to readme and setup.py.# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev1'
| <commit_before># Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
<commit_msg>Create a pre-release version for PyPI, to test tweaks to readme and setup.py.<commit_after># Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev1'
|
6e77aff69adba0ded366a704bdafb601514faf5d | salt/thorium/runner.py | salt/thorium/runner.py | # -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
| # -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
| Fix local opts from CLI | Fix local opts from CLI
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | # -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
Fix local opts from CLI | # -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
| <commit_before># -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
<commit_msg>Fix local opts from CLI<commit_after> | # -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
| # -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
Fix local opts from CLI# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
| <commit_before># -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
<commit_msg>Fix local opts from CLI<commit_after># -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
2cb406cac1a6faf1f2f79c1376ceac39871fb96e | pony_barn/build-django.py | pony_barn/build-django.py | import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| Make it so that django build actually uses it's own code. | Make it so that django build actually uses it's own code.
| Python | mit | ericholscher/pony_barn,ericholscher/pony_barn | import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
Make it so that django build actually uses it's own code. | import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| <commit_before>import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
<commit_msg>Make it so that django build actually uses it's own code.<commit_after> | import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
Make it so that django build actually uses it's own code.import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| <commit_before>import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
<commit_msg>Make it so that django build actually uses it's own code.<commit_after>import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
|
3b9105868cdeac3647aa9052532ce55d7c9b8dcd | bureaucrate/__main__.py | bureaucrate/__main__.py | from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
| from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
import logging
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
parser.add_argument('--debug', dest='loglevel', help='enable debug logging',
action='store_const', const=logging.DEBUG,
default=logging.WARNING)
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
from .bureaucrate import logger
logger.setLevel(opts.get('loglevel'))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
| Add debug flag, printing debug info | Add debug flag, printing debug info
| Python | mit | paulollivier/bureaucrate | from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
Add debug flag, printing debug info | from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
import logging
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
parser.add_argument('--debug', dest='loglevel', help='enable debug logging',
action='store_const', const=logging.DEBUG,
default=logging.WARNING)
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
from .bureaucrate import logger
logger.setLevel(opts.get('loglevel'))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
| <commit_before>from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
<commit_msg>Add debug flag, printing debug info<commit_after> | from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
import logging
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
parser.add_argument('--debug', dest='loglevel', help='enable debug logging',
action='store_const', const=logging.DEBUG,
default=logging.WARNING)
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
from .bureaucrate import logger
logger.setLevel(opts.get('loglevel'))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
| from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
Add debug flag, printing debug infofrom sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
import logging
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
parser.add_argument('--debug', dest='loglevel', help='enable debug logging',
action='store_const', const=logging.DEBUG,
default=logging.WARNING)
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
from .bureaucrate import logger
logger.setLevel(opts.get('loglevel'))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
| <commit_before>from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
<commit_msg>Add debug flag, printing debug info<commit_after>from sys import argv
from os.path import expanduser, join
from argparse import ArgumentParser
import logging
from . import __version__
from .bureaucrate import init
from .utils import Config
def process_account(conf: Config, account: str):
acc = init(join(conf.get("base_path"), account))
for mailbox in conf.get_mailboxes(account):
for message in acc[mailbox]:
message.exec_rules(conf.get('rules', [], mailbox, account))
def main():
parser = ArgumentParser()
parser.add_argument('--version', help="returns the version and exists")
parser.add_argument('-a', '--account', help="Restrict to an account")
parser.add_argument('-c', '--config', default='~/.bureaucraterc',
help='specify an alternate configuration file')
parser.add_argument('--debug', dest='loglevel', help='enable debug logging',
action='store_const', const=logging.DEBUG,
default=logging.WARNING)
opts = vars(parser.parse_args(argv[1:]))
conf = Config()
conf.parse(expanduser(opts.get('config')))
from .bureaucrate import logger
logger.setLevel(opts.get('loglevel'))
if opts.get('version', None):
print("bureaucrate v{}".format(__version__))
return
if opts.get('account', None):
process_account(conf, opts.get('account'))
else:
for account in conf.get_accounts():
process_account(conf, account)
if '__main__' in __name__:
main()
|
d4dc4effd92196e9ff18c04f217ded3cf1352103 | project/api/forms.py | project/api/forms.py | # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['person'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| Fix 'add user' form in Admin | Fix 'add user' form in Admin
| Python | bsd-2-clause | barberscore/barberscore-api,dbinetti/barberscore,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api | # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
Fix 'add user' form in Admin | # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['person'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| <commit_before># Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
<commit_msg>Fix 'add user' form in Admin<commit_after> | # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['person'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
Fix 'add user' form in Admin# Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['person'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| <commit_before># Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
<commit_msg>Fix 'add user' form in Admin<commit_after># Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['person'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
|
0dc217bd0cec8a0321dfc38b88696514179bf833 | editorconfig/__init__.py | editorconfig/__init__.py | """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "development")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| Upgrade version to 0.11.3 final | Upgrade version to 0.11.3 final
| Python | bsd-2-clause | VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,johnfraney/editorconfig-vim,VictorBjelkholm/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,benjifisher/editorconfig-vim | """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "development")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
Upgrade version to 0.11.3 final | """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| <commit_before>"""EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "development")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
<commit_msg>Upgrade version to 0.11.3 final<commit_after> | """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| """EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "development")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
Upgrade version to 0.11.3 final"""EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
| <commit_before>"""EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "development")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
<commit_msg>Upgrade version to 0.11.3 final<commit_after>"""EditorConfig Python Core"""
from editorconfig.versiontools import join_version
VERSION = (0, 11, 3, "final")
__all__ = ['get_properties', 'EditorConfigError', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
"""Locate and parse EditorConfig files for the given filename"""
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from editorconfig.handler import EditorConfigHandler
from editorconfig.exceptions import *
|
574dd4ef0aa0d6381938a0638e497374434cb75e | lilkv/columnfamily.py | lilkv/columnfamily.py | # -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
self.ROWS = set()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS.add(column)
return True
except:
return False
def __repr__(self):
return '<%r>' % self.name
| # -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
# A row consists of:
# {'rowkey': [col1, col2, col3]}
self.ROWS = dict()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS[column.row_key].append(column)
return True
except KeyError: # Key doesn't exist
self.ROWS[column.row_key] = [column]
except:
return False
def __repr__(self):
return '<%r>' % self.name
| Store rows as dictionaries of lists. | Store rows as dictionaries of lists.
| Python | mit | pgorla/lil-kv | # -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
self.ROWS = set()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS.add(column)
return True
except:
return False
def __repr__(self):
return '<%r>' % self.name
Store rows as dictionaries of lists. | # -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
# A row consists of:
# {'rowkey': [col1, col2, col3]}
self.ROWS = dict()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS[column.row_key].append(column)
return True
except KeyError: # Key doesn't exist
self.ROWS[column.row_key] = [column]
except:
return False
def __repr__(self):
return '<%r>' % self.name
| <commit_before># -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
self.ROWS = set()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS.add(column)
return True
except:
return False
def __repr__(self):
return '<%r>' % self.name
<commit_msg>Store rows as dictionaries of lists.<commit_after> | # -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
# A row consists of:
# {'rowkey': [col1, col2, col3]}
self.ROWS = dict()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS[column.row_key].append(column)
return True
except KeyError: # Key doesn't exist
self.ROWS[column.row_key] = [column]
except:
return False
def __repr__(self):
return '<%r>' % self.name
| # -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
self.ROWS = set()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS.add(column)
return True
except:
return False
def __repr__(self):
return '<%r>' % self.name
Store rows as dictionaries of lists.# -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
# A row consists of:
# {'rowkey': [col1, col2, col3]}
self.ROWS = dict()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS[column.row_key].append(column)
return True
except KeyError: # Key doesn't exist
self.ROWS[column.row_key] = [column]
except:
return False
def __repr__(self):
return '<%r>' % self.name
| <commit_before># -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
self.ROWS = set()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS.add(column)
return True
except:
return False
def __repr__(self):
return '<%r>' % self.name
<commit_msg>Store rows as dictionaries of lists.<commit_after># -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
# A row consists of:
# {'rowkey': [col1, col2, col3]}
self.ROWS = dict()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS[column.row_key].append(column)
return True
except KeyError: # Key doesn't exist
self.ROWS[column.row_key] = [column]
except:
return False
def __repr__(self):
return '<%r>' % self.name
|
3ce3e00b0ad3910c99e6d42b21e8d6839b4a2214 | sympy/sets/condset.py | sympy/sets/condset.py | from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
cond = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
| from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
lamda = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
| Fix property name in CondSet | Fix property name in CondSet
Signed-off-by: Harsh Gupta <c4bd8559369e527b4bb1785ff84e8ff50fde87c0@gmail.com>
| Python | bsd-3-clause | oliverlee/sympy,Vishluck/sympy,skidzo/sympy,drufat/sympy,sampadsaha5/sympy,chaffra/sympy,souravsingh/sympy,AkademieOlympia/sympy,kaichogami/sympy,grevutiu-gabriel/sympy,ga7g08/sympy,pandeyadarsh/sympy,mcdaniel67/sympy,jerli/sympy,AkademieOlympia/sympy,ahhda/sympy,emon10005/sympy,mafiya69/sympy,jerli/sympy,rahuldan/sympy,sahmed95/sympy,Davidjohnwilson/sympy,shikil/sympy,lindsayad/sympy,grevutiu-gabriel/sympy,chaffra/sympy,yukoba/sympy,aktech/sympy,Davidjohnwilson/sympy,ahhda/sympy,abhiii5459/sympy,VaibhavAgarwalVA/sympy,jbbskinny/sympy,Designist/sympy,kaichogami/sympy,cswiercz/sympy,kaushik94/sympy,oliverlee/sympy,abhiii5459/sympy,pandeyadarsh/sympy,jaimahajan1997/sympy,farhaanbukhsh/sympy,skidzo/sympy,souravsingh/sympy,rahuldan/sympy,jbbskinny/sympy,souravsingh/sympy,jerli/sympy,mafiya69/sympy,sampadsaha5/sympy,iamutkarshtiwari/sympy,hargup/sympy,VaibhavAgarwalVA/sympy,ahhda/sympy,farhaanbukhsh/sympy,yashsharan/sympy,yashsharan/sympy,Vishluck/sympy,grevutiu-gabriel/sympy,kevalds51/sympy,sahmed95/sympy,iamutkarshtiwari/sympy,Davidjohnwilson/sympy,yukoba/sympy,farhaanbukhsh/sympy,jaimahajan1997/sympy,Curious72/sympy,Vishluck/sympy,AkademieOlympia/sympy,wanglongqi/sympy,yashsharan/sympy,cswiercz/sympy,mcdaniel67/sympy,jbbskinny/sympy,kumarkrishna/sympy,debugger22/sympy,emon10005/sympy,madan96/sympy,abhiii5459/sympy,mafiya69/sympy,debugger22/sympy,lindsayad/sympy,kevalds51/sympy,wanglongqi/sympy,Designist/sympy,postvakje/sympy,MechCoder/sympy,kaichogami/sympy,jaimahajan1997/sympy,saurabhjn76/sympy,VaibhavAgarwalVA/sympy,Curious72/sympy,maniteja123/sympy,kumarkrishna/sympy,Curious72/sympy,ChristinaZografou/sympy,hargup/sympy,saurabhjn76/sympy,MechCoder/sympy,rahuldan/sympy,debugger22/sympy,moble/sympy,sampadsaha5/sympy,madan96/sympy,yukoba/sympy,chaffra/sympy,madan96/sympy,Titan-C/sympy,Arafatk/sympy,Shaswat27/sympy,postvakje/sympy,Shaswat27/sympy,hargup/sympy,lindsayad/sympy,iamutkarshtiwari/sympy,skidzo/sympy,wanglongqi/sympy,aktech/sympy,Shas
wat27/sympy,Titan-C/sympy,moble/sympy,drufat/sympy,emon10005/sympy,atreyv/sympy,kevalds51/sympy,saurabhjn76/sympy,mcdaniel67/sympy,drufat/sympy,ga7g08/sympy,sahmed95/sympy,cswiercz/sympy,Titan-C/sympy,ga7g08/sympy,atreyv/sympy,postvakje/sympy,moble/sympy,Designist/sympy,oliverlee/sympy,maniteja123/sympy,shikil/sympy,kumarkrishna/sympy,maniteja123/sympy,pandeyadarsh/sympy,atreyv/sympy,MechCoder/sympy,ChristinaZografou/sympy,Arafatk/sympy,ChristinaZografou/sympy,Arafatk/sympy,kaushik94/sympy,kaushik94/sympy,shikil/sympy,aktech/sympy | from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
cond = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
Fix property name in CondSet
Signed-off-by: Harsh Gupta <c4bd8559369e527b4bb1785ff84e8ff50fde87c0@gmail.com> | from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
lamda = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
| <commit_before>from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
cond = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
<commit_msg>Fix property name in CondSet
Signed-off-by: Harsh Gupta <c4bd8559369e527b4bb1785ff84e8ff50fde87c0@gmail.com><commit_after> | from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
lamda = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
| from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
cond = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
Fix property name in CondSet
Signed-off-by: Harsh Gupta <c4bd8559369e527b4bb1785ff84e8ff50fde87c0@gmail.com>from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
lamda = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
| <commit_before>from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
cond = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
<commit_msg>Fix property name in CondSet
Signed-off-by: Harsh Gupta <c4bd8559369e527b4bb1785ff84e8ff50fde87c0@gmail.com><commit_after>from __future__ import print_function, division
from sympy.core.basic import Basic
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S
from sympy.core.sympify import _sympify
from sympy.core.decorators import deprecated
from sympy.core.function import Lambda
class CondSet(Set):
"""
Set of elements which satisfies a given condition.
{x | cond(x) is True for x in S}
Examples
========
>>> from sympy import Symbol, S, CondSet, FiniteSet, Lambda, pi
>>> x = Symbol('x')
>>> sin_sols = CondSet(Lambda(x, Eq(sin(x), 0)), S.Reals)
>>> 2*pi in sin_sols
True
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
lamda = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
# XXX: probably we should check if self.cond is returning only true or
# false
return self.cond(other)
|
780a224f055fef4aba302334050c7679b4e675f2 | sanitize.py | sanitize.py | #!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import codecs
import os
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
file_path = os.path.dirname(os.path.abspath(__file__)) + '/' + args.inputfile
with codecs.open(file_path, 'r', 'utf-8') as json_data:
items = json.load(json_data)
for i, item in enumerate(items):
pass
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
| Read JSON file to be processed | Read JSON file to be processed
| Python | mit | staceytay/workabroad-scraper | #!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
Read JSON file to be processed | #!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import codecs
import os
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
file_path = os.path.dirname(os.path.abspath(__file__)) + '/' + args.inputfile
with codecs.open(file_path, 'r', 'utf-8') as json_data:
items = json.load(json_data)
for i, item in enumerate(items):
pass
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
<commit_msg>Read JSON file to be processed<commit_after> | #!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import codecs
import os
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
file_path = os.path.dirname(os.path.abspath(__file__)) + '/' + args.inputfile
with codecs.open(file_path, 'r', 'utf-8') as json_data:
items = json.load(json_data)
for i, item in enumerate(items):
pass
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
Read JSON file to be processed#!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import codecs
import os
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
file_path = os.path.dirname(os.path.abspath(__file__)) + '/' + args.inputfile
with codecs.open(file_path, 'r', 'utf-8') as json_data:
items = json.load(json_data)
for i, item in enumerate(items):
pass
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
<commit_msg>Read JSON file to be processed<commit_after>#!/usr/bin/env python
"""
This script cleans and processes JSON data scraped, using Scrapy, from
workabroad.ph.
"""
import argparse
import codecs
import os
import json
import sys
def main():
parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data")
parser.add_argument("export", help="Export file format, 'csv' or 'json'")
parser.add_argument("inputfile", help="Text file to be parsed")
parser.add_argument("outputfile", help="Name of file to export data to")
parser.add_argument("-v", "--verbose", help="Increase output verbosity, "
"use when debugging only", action="store_true")
global args
args = parser.parse_args()
file_path = os.path.dirname(os.path.abspath(__file__)) + '/' + args.inputfile
with codecs.open(file_path, 'r', 'utf-8') as json_data:
items = json.load(json_data)
for i, item in enumerate(items):
pass
if args.export == "csv":
pass
elif args.export == "json":
pass
else:
sys.exit("Invalid export file format: " + args.export + ", only 'csv' and "
"'json' is accepted")
if __name__ == '__main__':
main()
|
b573936d30840e8173d5ca59117a45ceae8dc2e6 | hoomd/_operations.py | hoomd/_operations.py | import hoomd._integrator
class Operations:
def __init__(self, simulation=None):
self.simulation = None
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._set_integrator(op)
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
def _set_integrator(self, integrator):
self._integrator = integrator
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
| import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
| Fix typo in constructor and remove set_integrator method | Fix typo in constructor and remove set_integrator method
| Python | bsd-3-clause | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | import hoomd._integrator
class Operations:
def __init__(self, simulation=None):
self.simulation = None
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._set_integrator(op)
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
def _set_integrator(self, integrator):
self._integrator = integrator
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
Fix typo in constructor and remove set_integrator method | import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
| <commit_before>import hoomd._integrator
class Operations:
def __init__(self, simulation=None):
self.simulation = None
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._set_integrator(op)
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
def _set_integrator(self, integrator):
self._integrator = integrator
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
<commit_msg>Fix typo in constructor and remove set_integrator method<commit_after> | import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
| import hoomd._integrator
class Operations:
def __init__(self, simulation=None):
self.simulation = None
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._set_integrator(op)
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
def _set_integrator(self, integrator):
self._integrator = integrator
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
Fix typo in constructor and remove set_integrator methodimport hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
| <commit_before>import hoomd._integrator
class Operations:
def __init__(self, simulation=None):
self.simulation = None
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._set_integrator(op)
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
def _set_integrator(self, integrator):
self._integrator = integrator
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
<commit_msg>Fix typo in constructor and remove set_integrator method<commit_after>import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._auto_schedule = False
self._compute = list()
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
def _store_reader(self, reader):
# TODO
pass
|
188fcafc6855da9cb7f946b3605ddca2bd20a0b8 | qiprofile_rest/routers.py | qiprofile_rest/routers.py | from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject_detail', SubjectDetailViewSet)
router.register(r'session_detail', SessionDetailViewSet)
| from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject[_-]detail', SubjectDetailViewSet)
router.register(r'session[_-]detail', SessionDetailViewSet)
| Allow underscore or dash in URL. | Allow underscore or dash in URL.
| Python | bsd-2-clause | ohsu-qin/qiprofile-rest,ohsu-qin/qirest | from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject_detail', SubjectDetailViewSet)
router.register(r'session_detail', SessionDetailViewSet)
Allow underscore or dash in URL. | from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject[_-]detail', SubjectDetailViewSet)
router.register(r'session[_-]detail', SessionDetailViewSet)
| <commit_before>from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject_detail', SubjectDetailViewSet)
router.register(r'session_detail', SessionDetailViewSet)
<commit_msg>Allow underscore or dash in URL.<commit_after> | from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject[_-]detail', SubjectDetailViewSet)
router.register(r'session[_-]detail', SessionDetailViewSet)
| from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject_detail', SubjectDetailViewSet)
router.register(r'session_detail', SessionDetailViewSet)
Allow underscore or dash in URL.from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject[_-]detail', SubjectDetailViewSet)
router.register(r'session[_-]detail', SessionDetailViewSet)
| <commit_before>from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject_detail', SubjectDetailViewSet)
router.register(r'session_detail', SessionDetailViewSet)
<commit_msg>Allow underscore or dash in URL.<commit_after>from rest_framework import routers
from .views import (UserViewSet, SubjectViewSet,
SubjectDetailViewSet, SessionDetailViewSet)
router = routers.SimpleRouter()
router.register(r'user', UserViewSet)
router.register(r'subjects', SubjectViewSet)
router.register(r'subject[_-]detail', SubjectDetailViewSet)
router.register(r'session[_-]detail', SessionDetailViewSet)
|
dba0a9b80eb5dd90bcf1184fabe998973fbd2393 | pacasus.py | pacasus.py | #!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
print('Program ended. The message was: ', ','.join(exception.args))
print("Please use the option --help for information on command line arguments.")
| #!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
if len(exception.args) > 0:
print('Program ended. The message was: ' + ','.join(exception.args))
else:
print('Program ended. The message was: {}'.format(exception))
print("Please use the option --help for information on command line arguments.")
| Print exception when no other info on exception is available | Print exception when no other info on exception is available | Python | mit | swarris/Pacasus | #!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
print('Program ended. The message was: ', ','.join(exception.args))
print("Please use the option --help for information on command line arguments.")
Print exception when no other info on exception is available | #!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
if len(exception.args) > 0:
print('Program ended. The message was: ' + ','.join(exception.args))
else:
print('Program ended. The message was: {}'.format(exception))
print("Please use the option --help for information on command line arguments.")
| <commit_before>#!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
print('Program ended. The message was: ', ','.join(exception.args))
print("Please use the option --help for information on command line arguments.")
<commit_msg>Print exception when no other info on exception is available<commit_after> | #!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
if len(exception.args) > 0:
print('Program ended. The message was: ' + ','.join(exception.args))
else:
print('Program ended. The message was: {}'.format(exception))
print("Please use the option --help for information on command line arguments.")
| #!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
print('Program ended. The message was: ', ','.join(exception.args))
print("Please use the option --help for information on command line arguments.")
Print exception when no other info on exception is available#!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
if len(exception.args) > 0:
print('Program ended. The message was: ' + ','.join(exception.args))
else:
print('Program ended. The message was: {}'.format(exception))
print("Please use the option --help for information on command line arguments.")
| <commit_before>#!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
print('Program ended. The message was: ', ','.join(exception.args))
print("Please use the option --help for information on command line arguments.")
<commit_msg>Print exception when no other info on exception is available<commit_after>#!/usr/bin/python
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) +"/pypaswas")
from pacasus.pacasusall import Pacasus
import logging
if __name__ == '__main__':
try:
ppw = Pacasus()
ppw.run()
except Exception as exception:
# Show complete exception when running in DEBUG
if (hasattr(ppw.settings, 'loglevel') and
getattr(logging, 'DEBUG') == ppw.logger.getEffectiveLevel()):
ppw.logger.exception(str(exception))
else:
if len(exception.args) > 0:
print('Program ended. The message was: ' + ','.join(exception.args))
else:
print('Program ended. The message was: {}'.format(exception))
print("Please use the option --help for information on command line arguments.")
|
ee5de7b17aea09bf06da3e79bc21cde4473969f9 | nethud/proto/telnet.py | nethud/proto/telnet.py | from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
def TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
| from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
| Define class with `class`, not `def` | Define class with `class`, not `def`
I don't make calsses much, can you tell?
| Python | mit | ryansb/netHUD | from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
def TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
Define class with `class`, not `def`
I don't make calsses much, can you tell? | from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
| <commit_before>from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
def TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
<commit_msg>Define class with `class`, not `def`
I don't make calsses much, can you tell?<commit_after> | from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
| from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
def TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
Define class with `class`, not `def`
I don't make calsses much, can you tell?from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
| <commit_before>from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
def TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
<commit_msg>Define class with `class`, not `def`
I don't make calsses much, can you tell?<commit_after>from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
|
8915729158c9b5c22d16a4c2deee66f79a8276b9 | apps/local_apps/account/middleware.py | apps/local_apps/account/middleware.py | from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
| from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except Account.DoesNotExist:
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
| Throw 500 error on multiple account in LocaleMiddleware so we can fix them. | Throw 500 error on multiple account in LocaleMiddleware so we can fix them.
| Python | mit | ingenieroariel/pinax,ingenieroariel/pinax | from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
Throw 500 error on multiple account in LocaleMiddleware so we can fix them. | from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except Account.DoesNotExist:
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
| <commit_before>from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
<commit_msg>Throw 500 error on multiple account in LocaleMiddleware so we can fix them.<commit_after> | from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except Account.DoesNotExist:
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
| from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
Throw 500 error on multiple account in LocaleMiddleware so we can fix them.from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except Account.DoesNotExist:
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
| <commit_before>from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
<commit_msg>Throw 500 error on multiple account in LocaleMiddleware so we can fix them.<commit_after>from django.utils.cache import patch_vary_headers
from django.utils import translation
from account.models import Account
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context depending on the user's account. This allows pages
to be dynamically translated to the language the user desires
(if the language is available, of course).
"""
def get_language_for_user(self, request):
if request.user.is_authenticated():
try:
account = Account.objects.get(user=request.user)
return account.language
except Account.DoesNotExist:
pass
return translation.get_language_from_request(request)
def process_request(self, request):
translation.activate(self.get_language_for_user(request))
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
patch_vary_headers(response, ('Accept-Language',))
response['Content-Language'] = translation.get_language()
translation.deactivate()
return response
|
b8d0398f5c134c3457f9b09c3457e0f882b75732 | lutrisweb/settings/production.py | lutrisweb/settings/production.py | import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
| import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
| Store sessions in cache + database | Store sessions in cache + database
| Python | agpl-3.0 | lutris/website,lutris/website,Turupawn/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website,Turupawn/website | import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
Store sessions in cache + database | import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
| <commit_before>import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
<commit_msg>Store sessions in cache + database<commit_after> | import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
| import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
Store sessions in cache + databaseimport os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
| <commit_before>import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
<commit_msg>Store sessions in cache + database<commit_after>import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
|
f4d703fb38c8d4efbff709a6e3c7478b7cf96db2 | code/conditional_switch_as_else_if.py | code/conditional_switch_as_else_if.py | score = 76
if score < 60:
grade = 'F'
elif score < 70:
grade = 'D'
elif score < 80:
grade = 'C'
elif score < 90:
grade = 'B'
else:
grade = 'A'
print(grade)
| score = 76
grades = [
(60, 'F'),
(70, 'D'),
(80, 'C'),
(90, 'B'),
]
print(next((g for x, g in grades if score < x), 'A'))
| Replace swith statement with a mapping | Replace swith statement with a mapping
Official Python documentation explains [1], that switch statement can be
replaced by `if .. elif .. else` or by a mapping.
In first example with varying condtitions `if .. elif .. else` was used, and in
this example a mapping is much better fitted way of doing a switch statement
logic.
Since this is an official and documented way of doing switch statement logic in
Python, am adding it here.
[1] https://docs.python.org/3/faq/design.html#why-isn-t-there-a-switch-or-case-statement-in-python
| Python | mit | evmorov/lang-compare,evmorov/lang-compare,evmorov/lang-compare,Evmorov/ruby-coffeescript,Evmorov/ruby-coffeescript,Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare,evmorov/lang-compare | score = 76
if score < 60:
grade = 'F'
elif score < 70:
grade = 'D'
elif score < 80:
grade = 'C'
elif score < 90:
grade = 'B'
else:
grade = 'A'
print(grade)
Replace swith statement with a mapping
Official Python documentation explains [1], that switch statement can be
replaced by `if .. elif .. else` or by a mapping.
In first example with varying condtitions `if .. elif .. else` was used, and in
this example a mapping is much better fitted way of doing a switch statement
logic.
Since this is an official and documented way of doing switch statement logic in
Python, am adding it here.
[1] https://docs.python.org/3/faq/design.html#why-isn-t-there-a-switch-or-case-statement-in-python | score = 76
grades = [
(60, 'F'),
(70, 'D'),
(80, 'C'),
(90, 'B'),
]
print(next((g for x, g in grades if score < x), 'A'))
| <commit_before>score = 76
if score < 60:
grade = 'F'
elif score < 70:
grade = 'D'
elif score < 80:
grade = 'C'
elif score < 90:
grade = 'B'
else:
grade = 'A'
print(grade)
<commit_msg>Replace swith statement with a mapping
Official Python documentation explains [1], that switch statement can be
replaced by `if .. elif .. else` or by a mapping.
In first example with varying condtitions `if .. elif .. else` was used, and in
this example a mapping is much better fitted way of doing a switch statement
logic.
Since this is an official and documented way of doing switch statement logic in
Python, am adding it here.
[1] https://docs.python.org/3/faq/design.html#why-isn-t-there-a-switch-or-case-statement-in-python<commit_after> | score = 76
grades = [
(60, 'F'),
(70, 'D'),
(80, 'C'),
(90, 'B'),
]
print(next((g for x, g in grades if score < x), 'A'))
| score = 76
if score < 60:
grade = 'F'
elif score < 70:
grade = 'D'
elif score < 80:
grade = 'C'
elif score < 90:
grade = 'B'
else:
grade = 'A'
print(grade)
Replace swith statement with a mapping
Official Python documentation explains [1], that switch statement can be
replaced by `if .. elif .. else` or by a mapping.
In first example with varying condtitions `if .. elif .. else` was used, and in
this example a mapping is much better fitted way of doing a switch statement
logic.
Since this is an official and documented way of doing switch statement logic in
Python, am adding it here.
[1] https://docs.python.org/3/faq/design.html#why-isn-t-there-a-switch-or-case-statement-in-pythonscore = 76
grades = [
(60, 'F'),
(70, 'D'),
(80, 'C'),
(90, 'B'),
]
print(next((g for x, g in grades if score < x), 'A'))
| <commit_before>score = 76
if score < 60:
grade = 'F'
elif score < 70:
grade = 'D'
elif score < 80:
grade = 'C'
elif score < 90:
grade = 'B'
else:
grade = 'A'
print(grade)
<commit_msg>Replace swith statement with a mapping
Official Python documentation explains [1], that switch statement can be
replaced by `if .. elif .. else` or by a mapping.
In first example with varying condtitions `if .. elif .. else` was used, and in
this example a mapping is much better fitted way of doing a switch statement
logic.
Since this is an official and documented way of doing switch statement logic in
Python, am adding it here.
[1] https://docs.python.org/3/faq/design.html#why-isn-t-there-a-switch-or-case-statement-in-python<commit_after>score = 76
grades = [
(60, 'F'),
(70, 'D'),
(80, 'C'),
(90, 'B'),
]
print(next((g for x, g in grades if score < x), 'A'))
|
4583c9949143e58bf400fc86e27d634aa382f605 | tests/test_expanded.py | tests/test_expanded.py | from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """-[ RECORD 0 ]
name | hello
age | 123
-[ RECORD 1 ]
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
| from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """***************************[ 1. row ]***************************
name | hello
age | 123
***************************[ 2. row ]***************************
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
| Update expanded tests to match mysql style. | Update expanded tests to match mysql style.
| Python | bsd-3-clause | oguzy/mycli,chenpingzhao/mycli,ZuoGuocai/mycli,evook/mycli,jinstrive/mycli,j-bennet/mycli,danieljwest/mycli,suzukaze/mycli,thanatoskira/mycli,chenpingzhao/mycli,j-bennet/mycli,brewneaux/mycli,webwlsong/mycli,MnO2/rediscli,brewneaux/mycli,shoma/mycli,mdsrosa/mycli,oguzy/mycli,danieljwest/mycli,jinstrive/mycli,thanatoskira/mycli,ZuoGuocai/mycli,webwlsong/mycli,evook/mycli,suzukaze/mycli,D-e-e-m-o/mycli,MnO2/rediscli,shoma/mycli,martijnengler/mycli,mdsrosa/mycli,martijnengler/mycli,D-e-e-m-o/mycli | from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """-[ RECORD 0 ]
name | hello
age | 123
-[ RECORD 1 ]
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
Update expanded tests to match mysql style. | from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """***************************[ 1. row ]***************************
name | hello
age | 123
***************************[ 2. row ]***************************
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
| <commit_before>from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """-[ RECORD 0 ]
name | hello
age | 123
-[ RECORD 1 ]
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
<commit_msg>Update expanded tests to match mysql style.<commit_after> | from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """***************************[ 1. row ]***************************
name | hello
age | 123
***************************[ 2. row ]***************************
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
| from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """-[ RECORD 0 ]
name | hello
age | 123
-[ RECORD 1 ]
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
Update expanded tests to match mysql style.from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """***************************[ 1. row ]***************************
name | hello
age | 123
***************************[ 2. row ]***************************
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
| <commit_before>from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """-[ RECORD 0 ]
name | hello
age | 123
-[ RECORD 1 ]
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
<commit_msg>Update expanded tests to match mysql style.<commit_after>from mycli.packages.expanded import expanded_table
def test_expanded_table_renders():
input = [("hello", 123), ("world", 456)]
expected = """***************************[ 1. row ]***************************
name | hello
age | 123
***************************[ 2. row ]***************************
name | world
age | 456
"""
assert expected == expanded_table(input, ["name", "age"])
|
e09b8488b14cac72a8724a72b44e272df5a52db4 | tests/test_pyserial.py | tests/test_pyserial.py | from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(comports) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
| from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
| Convert 'comports' to a list before getting its length (the value may be a generator). | Convert 'comports' to a list before getting its length (the value may be
a generator).
| Python | agpl-3.0 | makerbot/s3g,Jnesselr/s3g,makerbot/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g | from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(comports) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
Convert 'comports' to a list before getting its length (the value may be
a generator). | from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
| <commit_before>from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(comports) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
<commit_msg>Convert 'comports' to a list before getting its length (the value may be
a generator).<commit_after> | from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
| from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(comports) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
Convert 'comports' to a list before getting its length (the value may be
a generator).from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
| <commit_before>from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(comports) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
<commit_msg>Convert 'comports' to a list before getting its length (the value may be
a generator).<commit_after>from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
|
56b23bc44655e4a965939ceb5908cd84cfd9de88 | src/room.py | src/room.py | class Room(object):
""" This class is responsible for managing the people in a room """
| class Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
| Add init method to class Room | Add init method to class Room
| Python | mit | EdwinKato/Space-Allocator,EdwinKato/Space-Allocator | class Room(object):
""" This class is responsible for managing the people in a room """
Add init method to class Room | class Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
| <commit_before>class Room(object):
""" This class is responsible for managing the people in a room """
<commit_msg>Add init method to class Room<commit_after> | class Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
| class Room(object):
""" This class is responsible for managing the people in a room """
Add init method to class Roomclass Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
| <commit_before>class Room(object):
""" This class is responsible for managing the people in a room """
<commit_msg>Add init method to class Room<commit_after>class Room(object):
""" This class is responsible for managing the people in a room """
def __init__(self, room_type, room_name):
self.residents = []
self.room_name = room_name
self.room_type =room_type
if room_type == "office":
self.maximum_no_of_people = 6
else:
self.maximum_no_of_people = 4
|
a72a13aa89c11c4d9a2bad48b67abdf352989981 | parsley/decorators.py | parsley/decorators.py | from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if type(val) == forms.URLField:
val.widget.attrs.update({"data-type": "url"})
if type(val) == forms.EmailField:
val.widget.attrs.update({"data-type": "email"})
if type(val) == forms.IntegerField:
val.widget.attrs.update({"data-type": "digits"})
if type(val) == forms.DecimalField:
val.widget.attrs.update({"data-type": "number"})
if type(val) == forms.RegexField:
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
| from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if isinstance(val, forms.URLField):
val.widget.attrs.update({"data-type": "url"})
if isinstance(val, forms.EmailField):
val.widget.attrs.update({"data-type": "email"})
if isinstance(val, forms.IntegerField):
val.widget.attrs.update({"data-type": "digits"})
if isinstance(val, forms.DecimalField):
val.widget.attrs.update({"data-type": "number"})
if isinstance(val, forms.RegexField):
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
| Change type comparisions with isinstance | Change type comparisions with isinstance
| Python | bsd-3-clause | agiliq/Django-parsley,jproffitt/Django-parsley,jproffitt/Django-parsley,agiliq/Django-parsley,Tivix/Django-parsley,Tivix/Django-parsley,agiliq/Django-parsley,jproffitt/Django-parsley | from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if type(val) == forms.URLField:
val.widget.attrs.update({"data-type": "url"})
if type(val) == forms.EmailField:
val.widget.attrs.update({"data-type": "email"})
if type(val) == forms.IntegerField:
val.widget.attrs.update({"data-type": "digits"})
if type(val) == forms.DecimalField:
val.widget.attrs.update({"data-type": "number"})
if type(val) == forms.RegexField:
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
Change type comparisions with isinstance | from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if isinstance(val, forms.URLField):
val.widget.attrs.update({"data-type": "url"})
if isinstance(val, forms.EmailField):
val.widget.attrs.update({"data-type": "email"})
if isinstance(val, forms.IntegerField):
val.widget.attrs.update({"data-type": "digits"})
if isinstance(val, forms.DecimalField):
val.widget.attrs.update({"data-type": "number"})
if isinstance(val, forms.RegexField):
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
| <commit_before>from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if type(val) == forms.URLField:
val.widget.attrs.update({"data-type": "url"})
if type(val) == forms.EmailField:
val.widget.attrs.update({"data-type": "email"})
if type(val) == forms.IntegerField:
val.widget.attrs.update({"data-type": "digits"})
if type(val) == forms.DecimalField:
val.widget.attrs.update({"data-type": "number"})
if type(val) == forms.RegexField:
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
<commit_msg>Change type comparisions with isinstance<commit_after> | from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if isinstance(val, forms.URLField):
val.widget.attrs.update({"data-type": "url"})
if isinstance(val, forms.EmailField):
val.widget.attrs.update({"data-type": "email"})
if isinstance(val, forms.IntegerField):
val.widget.attrs.update({"data-type": "digits"})
if isinstance(val, forms.DecimalField):
val.widget.attrs.update({"data-type": "number"})
if isinstance(val, forms.RegexField):
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
| from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if type(val) == forms.URLField:
val.widget.attrs.update({"data-type": "url"})
if type(val) == forms.EmailField:
val.widget.attrs.update({"data-type": "email"})
if type(val) == forms.IntegerField:
val.widget.attrs.update({"data-type": "digits"})
if type(val) == forms.DecimalField:
val.widget.attrs.update({"data-type": "number"})
if type(val) == forms.RegexField:
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
Change type comparisions with isinstancefrom django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if isinstance(val, forms.URLField):
val.widget.attrs.update({"data-type": "url"})
if isinstance(val, forms.EmailField):
val.widget.attrs.update({"data-type": "email"})
if isinstance(val, forms.IntegerField):
val.widget.attrs.update({"data-type": "digits"})
if isinstance(val, forms.DecimalField):
val.widget.attrs.update({"data-type": "number"})
if isinstance(val, forms.RegexField):
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
| <commit_before>from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if type(val) == forms.URLField:
val.widget.attrs.update({"data-type": "url"})
if type(val) == forms.EmailField:
val.widget.attrs.update({"data-type": "email"})
if type(val) == forms.IntegerField:
val.widget.attrs.update({"data-type": "digits"})
if type(val) == forms.DecimalField:
val.widget.attrs.update({"data-type": "number"})
if type(val) == forms.RegexField:
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
<commit_msg>Change type comparisions with isinstance<commit_after>from django import forms
def parsleyfy(klass):
class ParsleyClass(klass):
def __init__(self, *args, **kwargs):
super(ParsleyClass, self).__init__(*args, **kwargs)
for key, val in self.fields.items():
if val.required:
val.widget.attrs.update({"data-required": "true"})
if isinstance(val, forms.URLField):
val.widget.attrs.update({"data-type": "url"})
if isinstance(val, forms.EmailField):
val.widget.attrs.update({"data-type": "email"})
if isinstance(val, forms.IntegerField):
val.widget.attrs.update({"data-type": "digits"})
if isinstance(val, forms.DecimalField):
val.widget.attrs.update({"data-type": "number"})
if isinstance(val, forms.RegexField):
val.widget.attrs.update({"data-regexp": val.regex.pattern})
if hasattr(val, "max_length") and val.max_length:
val.widget.attrs.update({"data-maxlength": val.max_length})
if hasattr(val, "min_length") and val.min_length:
val.widget.attrs.update({"data-minlength": val.min_length})
return ParsleyClass
|
5cf8088d1cd808a31a38c776fd4668858205ddf2 | openedx/features/job_board/views.py | openedx/features/job_board/views.py | from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
| from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
| Change job_count var name to total_job_count | Change job_count var name to total_job_count
| Python | agpl-3.0 | philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform | from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
Change job_count var name to total_job_count | from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
| <commit_before>from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
<commit_msg>Change job_count var name to total_job_count<commit_after> | from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
| from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
Change job_count var name to total_job_countfrom django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
| <commit_before>from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
<commit_msg>Change job_count var name to total_job_count<commit_after>from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobsListView(ListView):
model = Job
context_object_name = 'jobs_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
def get_context_data(self, **kwargs):
context = super(JobsListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
e0d4defa7bc7cd3ac70d77040c00150215ca251f | gavel/controllers/csrf_protection.py | gavel/controllers/csrf_protection.py | from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.pop('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
| from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.get('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
| Switch CSRF token from per-request to per-session | Switch CSRF token from per-request to per-session
This patch switches the CSRF token handling from per-request to
per-session. The per-request behavior was overly aggressive, and it
restricted parallel browsing (e.g. multiple open browser tabs).
| Python | agpl-3.0 | anishathalye/gavel,anishathalye/gavel,anishathalye/gavel | from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.pop('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
Switch CSRF token from per-request to per-session
This patch switches the CSRF token handling from per-request to
per-session. The per-request behavior was overly aggressive, and it
restricted parallel browsing (e.g. multiple open browser tabs). | from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.get('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
| <commit_before>from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.pop('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
<commit_msg>Switch CSRF token from per-request to per-session
This patch switches the CSRF token handling from per-request to
per-session. The per-request behavior was overly aggressive, and it
restricted parallel browsing (e.g. multiple open browser tabs).<commit_after> | from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.get('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
| from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.pop('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
Switch CSRF token from per-request to per-session
This patch switches the CSRF token handling from per-request to
per-session. The per-request behavior was overly aggressive, and it
restricted parallel browsing (e.g. multiple open browser tabs).from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.get('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
| <commit_before>from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.pop('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
<commit_msg>Switch CSRF token from per-request to per-session
This patch switches the CSRF token handling from per-request to
per-session. The per-request behavior was overly aggressive, and it
restricted parallel browsing (e.g. multiple open browser tabs).<commit_after>from gavel import app
import gavel.utils as utils
from flask import abort, request, session
@app.before_request
def csrf_protect():
if request.method == "POST":
token = session.get('_csrf_token', None)
if not token or token != request.form.get('_csrf_token'):
abort(403)
def generate_csrf_token():
if '_csrf_token' not in session:
session['_csrf_token'] = utils.gen_secret(32)
return session['_csrf_token']
app.jinja_env.globals['csrf_token'] = generate_csrf_token
|
370e00e40012b4a99ccb9ec6e67e6a6b45a7f661 | common/rc_sensor_cli.py | common/rc_sensor_cli.py | #!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=150))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count()) | #!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=180))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count()) | Increase timeout on RC sensor readings | Increase timeout on RC sensor readings
| Python | mit | mecworks/garden_pi,mecworks/garden_pi,mecworks/garden_pi,mecworks/garden_pi | #!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=150))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count())Increase timeout on RC sensor readings | #!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=180))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count()) | <commit_before>#!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=150))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count())<commit_msg>Increase timeout on RC sensor readings<commit_after> | #!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=180))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count()) | #!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=150))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count())Increase timeout on RC sensor readings#!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=180))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count()) | <commit_before>#!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=150))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count())<commit_msg>Increase timeout on RC sensor readings<commit_after>#!/usr/bin/env python
import pexpect
import os
import re
path = os.path.dirname(__file__)
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
self.rcsensor_bin = os.path.join(path, '../utils/rcsensor_cli')
self.rcsensor_cmd = 'sudo %s -g %s -c %s -d %s' % (self.rcsensor_bin, gpio, cycles, discharge_delay)
self.rcsense_re = re.compile('(\d+)\s')
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
m = self.rcsense_re.match(pexpect.run(self.rcsensor_cmd, timeout=180))
count = int(m.groups()[0])
return count
if __name__ == '__main__':
sensor = RcSensor(22)
print(sensor.rc_count()) |
316d9557002c54c5dd03f2a740367946b997d06a | src/foremast/utils/generate_encoded_user_data.py | src/foremast/utils/generate_encoded_user_data.py | """Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
| """Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
| Use new relative import within directory | fix: Use new relative import within directory
See also: PSOBAT-1197
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
fix: Use new relative import within directory
See also: PSOBAT-1197 | """Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
| <commit_before>"""Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
<commit_msg>fix: Use new relative import within directory
See also: PSOBAT-1197<commit_after> | """Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
| """Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
fix: Use new relative import within directory
See also: PSOBAT-1197"""Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
| <commit_before>"""Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
<commit_msg>fix: Use new relative import within directory
See also: PSOBAT-1197<commit_after>"""Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group nane, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
|
76551f7a05506a872ec6535eb3263710650ea8ce | glue/core/data_factories/__init__.py | glue/core/data_factories/__init__.py | from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
| from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
| Order of import matters for disambiguation, but this should be fixed later to avoid this. | Order of import matters for disambiguation, but this should be fixed later to avoid this.
| Python | bsd-3-clause | saimn/glue,stscieisenhamer/glue,stscieisenhamer/glue,JudoWill/glue,saimn/glue,JudoWill/glue | from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
Order of import matters for disambiguation, but this should be fixed later to avoid this. | from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
| <commit_before>from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
<commit_msg>Order of import matters for disambiguation, but this should be fixed later to avoid this.<commit_after> | from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
| from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
Order of import matters for disambiguation, but this should be fixed later to avoid this.from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
| <commit_before>from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .dendrogram import *
from .tables import *
<commit_msg>Order of import matters for disambiguation, but this should be fixed later to avoid this.<commit_after>from .helpers import *
from .gridded import *
from .pandas import *
from .excel import *
from .image import *
from .tables import *
from .dendrogram import *
|
21a7e7557b8e02ef448bcc46a63e983f48cafe38 | tests/test_endpoint.py | tests/test_endpoint.py | from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
assert set(endpoints.keys()) == {('/foo', methods.GET)}
| from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
foo_endpoint = Endpoint('/foo', methods.GET)
assert set(endpoints.keys()) == {foo_endpoint}
assert endpoints[foo_endpoint] == sample_view
| Test Endpoint.endpoints has all endpoints | Test Endpoint.endpoints has all endpoints
| Python | mit | acuros/noopy | from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
assert set(endpoints.keys()) == {('/foo', methods.GET)}
Test Endpoint.endpoints has all endpoints | from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
foo_endpoint = Endpoint('/foo', methods.GET)
assert set(endpoints.keys()) == {foo_endpoint}
assert endpoints[foo_endpoint] == sample_view
| <commit_before>from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
assert set(endpoints.keys()) == {('/foo', methods.GET)}
<commit_msg>Test Endpoint.endpoints has all endpoints<commit_after> | from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
foo_endpoint = Endpoint('/foo', methods.GET)
assert set(endpoints.keys()) == {foo_endpoint}
assert endpoints[foo_endpoint] == sample_view
| from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
assert set(endpoints.keys()) == {('/foo', methods.GET)}
Test Endpoint.endpoints has all endpointsfrom noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
foo_endpoint = Endpoint('/foo', methods.GET)
assert set(endpoints.keys()) == {foo_endpoint}
assert endpoints[foo_endpoint] == sample_view
| <commit_before>from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
assert set(endpoints.keys()) == {('/foo', methods.GET)}
<commit_msg>Test Endpoint.endpoints has all endpoints<commit_after>from noopy.endpoint import methods
from noopy.endpoint.decorators import endpoint
@endpoint('/foo', methods.GET)
def sample_view(event, context):
pass
def test_resources_added():
from noopy.endpoint.resource import Resource
resources = Resource.resources
assert set(resources.keys()) == {'/', '/foo'}
assert resources['/'].children == [resources['/foo']]
assert resources['/foo'].parent == resources['/']
def test_endpoints_added():
from noopy.endpoint import Endpoint
endpoints = Endpoint.endpoints
foo_endpoint = Endpoint('/foo', methods.GET)
assert set(endpoints.keys()) == {foo_endpoint}
assert endpoints[foo_endpoint] == sample_view
|
6130d6d20992dff449696094790a4177302aae9a | tests/unit/conftest.py | tests/unit/conftest.py | from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='module')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
| from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='session')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
| Set up the database once for all unittests | Set up the database once for all unittests
Resolves an error with a static pointer to a dead database and speeds up tests
| Python | cc0-1.0 | chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend | from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='module')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
Set up the database once for all unittests
Resolves an error with a static pointer to a dead database and speeds up tests | from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='session')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
| <commit_before>from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='module')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
<commit_msg>Set up the database once for all unittests
Resolves an error with a static pointer to a dead database and speeds up tests<commit_after> | from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='session')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
| from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='module')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
Set up the database once for all unittests
Resolves an error with a static pointer to a dead database and speeds up testsfrom random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='session')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
| <commit_before>from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='module')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
<commit_msg>Set up the database once for all unittests
Resolves an error with a static pointer to a dead database and speeds up tests<commit_after>from random import randint
import pytest
import dataactcore.config
from dataactcore.scripts.databaseSetup import (
createDatabase, dropDatabase, runMigrations)
from dataactvalidator.interfaces.interfaceHolder import InterfaceHolder
@pytest.yield_fixture(scope='session')
def database():
"""Sets up a clean database, yielding a relevant interface holder"""
rand_id = str(randint(1, 9999))
config = dataactcore.config.CONFIG_DB
config['db_name'] = 'unittest{}_data_broker'.format(rand_id)
dataactcore.config.CONFIG_DB = config
createDatabase(config['db_name'])
runMigrations()
interface = InterfaceHolder()
yield interface
interface.close()
dropDatabase(config['db_name'])
|
b203f5ebbec108da7abffc3c5ef3a8a2d0334837 | planet_alignment/app/app_factory.py | planet_alignment/app/app_factory.py | """
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
| """
.. module:: app_factory
:platform: linux
:synopsis: The class factory to create the application App.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
| Add doc synopsis for the app factory. | Add doc synopsis for the app factory.
| Python | mit | paulfanelli/planet_alignment | """
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
Add doc synopsis for the app factory. | """
.. module:: app_factory
:platform: linux
:synopsis: The class factory to create the application App.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
| <commit_before>"""
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
<commit_msg>Add doc synopsis for the app factory.<commit_after> | """
.. module:: app_factory
:platform: linux
:synopsis: The class factory to create the application App.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
    """This is the class factory for the App.

    - **parameters** and **types**::

      :param cmd_args: The command-line args.
      :type cmd_args: argparse Namespace
    """
    # Zope-style interface declaration; registers this class as an
    # implementation of IAppFactory.
    implements(IAppFactory)

    def __init__(self, cmd_args):
        # Eagerly parse config and load plugins at construction time so a
        # bad --config or --plugins path fails before create() is called.
        data = BunchParser().parse(cmd_args.config)
        self._system_data = SystemData(data)
        self._plugins = PluginsManager(cmd_args.plugins)
        self._time = cmd_args.time

    def create(self):
        """Returns the created App object.

        :return: Returns the App object.
        :rtype: App class.
        """
        return App(self._system_data, self._plugins, self._time)
| """
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
Add doc synopsis for the app factory."""
.. module:: app_factory
:platform: linux
:synopsis: The class factory to create the application App.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
| <commit_before>"""
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
<commit_msg>Add doc synopsis for the app factory.<commit_after>"""
.. module:: app_factory
:platform: linux
:synopsis: The class factory to create the application App.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
|
1e001eb11938bd5c613e655f86943167cd945d50 | local_sync_client.py | local_sync_client.py | # -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
| # -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
| Fix bug which caused put_object in LocalSyncClient to fail on create | Fix bug which caused put_object in LocalSyncClient to fail on create
| Python | mit | MichaelAquilina/s3backup,MichaelAquilina/s3backup | # -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
Fix bug which caused put_object in LocalSyncClient to fail on create | # -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
| <commit_before># -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
<commit_msg>Fix bug which caused put_object in LocalSyncClient to fail on create<commit_after> | # -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
| # -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
Fix bug which caused put_object in LocalSyncClient to fail on create# -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
| <commit_before># -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
<commit_msg>Fix bug which caused put_object in LocalSyncClient to fail on create<commit_after># -*- coding: utf-8 -*-
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
|
8c2ebccac0f633b3d2198a6a9d477ac4b8a620df | koztumize/application.py | koztumize/application.py | """Declare the Koztumize application using Pynuts."""
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize(
__name__, config_file='config/config.cfg') # pylint: disable=C0103
| """Declare the Koztumize application using Pynuts."""
import os
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize( # pylint: disable=E1101
__name__,
config_file=os.environ.get('KOZTUMIZE_CONFIG') or 'config/config.cfg')
| Use an environment variable as config file | Use an environment variable as config file
| Python | agpl-3.0 | Kozea/Koztumize,Kozea/Koztumize,Kozea/Koztumize | """Declare the Koztumize application using Pynuts."""
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize(
__name__, config_file='config/config.cfg') # pylint: disable=C0103
Use an environment variable as config file | """Declare the Koztumize application using Pynuts."""
import os
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize( # pylint: disable=E1101
__name__,
config_file=os.environ.get('KOZTUMIZE_CONFIG') or 'config/config.cfg')
| <commit_before>"""Declare the Koztumize application using Pynuts."""
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize(
__name__, config_file='config/config.cfg') # pylint: disable=C0103
<commit_msg>Use an environment variable as config file<commit_after> | """Declare the Koztumize application using Pynuts."""
import os
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize( # pylint: disable=E1101
__name__,
config_file=os.environ.get('KOZTUMIZE_CONFIG') or 'config/config.cfg')
| """Declare the Koztumize application using Pynuts."""
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize(
__name__, config_file='config/config.cfg') # pylint: disable=C0103
Use an environment variable as config file"""Declare the Koztumize application using Pynuts."""
import os
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize( # pylint: disable=E1101
__name__,
config_file=os.environ.get('KOZTUMIZE_CONFIG') or 'config/config.cfg')
| <commit_before>"""Declare the Koztumize application using Pynuts."""
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize(
__name__, config_file='config/config.cfg') # pylint: disable=C0103
<commit_msg>Use an environment variable as config file<commit_after>"""Declare the Koztumize application using Pynuts."""
import os
import ldap
from pynuts import Pynuts
class Koztumize(Pynuts):
"""The class which open the ldap."""
@property
def ldap(self):
"""Open the ldap."""
if 'LDAP' not in self.config: # pragma: no cover
self.config['LDAP'] = ldap.open(self.config['LDAP_HOST'])
return self.config['LDAP']
app = Koztumize( # pylint: disable=E1101
__name__,
config_file=os.environ.get('KOZTUMIZE_CONFIG') or 'config/config.cfg')
|
e08be68c9a998678edbb24620518fad7d02582b6 | lib/datasets/__init__.py | lib/datasets/__init__.py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
MATLAB = 'matlab'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
| # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
#MATLAB = 'matlab'
MATLAB = 'matlab.exe'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
| Use matlab.exe instead of matlab in Windows | Use matlab.exe instead of matlab in Windows | Python | mit | only4hj/fast-rcnn,only4hj/fast-rcnn,only4hj/fast-rcnn | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
MATLAB = 'matlab'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
Use matlab.exe instead of matlab in Windows | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
#MATLAB = 'matlab'
MATLAB = 'matlab.exe'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
| <commit_before># --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
MATLAB = 'matlab'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
<commit_msg>Use matlab.exe instead of matlab in Windows<commit_after> | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
#MATLAB = 'matlab'
MATLAB = 'matlab.exe'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
| # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
MATLAB = 'matlab'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
Use matlab.exe instead of matlab in Windows# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
#MATLAB = 'matlab'
MATLAB = 'matlab.exe'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
| <commit_before># --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
MATLAB = 'matlab'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
<commit_msg>Use matlab.exe instead of matlab in Windows<commit_after># --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from .imdb import imdb
from .pascal_voc import pascal_voc
from . import factory
import os.path as osp
ROOT_DIR = osp.join(osp.dirname(__file__), '..', '..')
# We assume your matlab binary is in your path and called `matlab'.
# If either is not true, just add it to your path and alias it as matlab, or
# you could change this file.
#MATLAB = 'matlab'
MATLAB = 'matlab.exe'
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if _which(MATLAB) is None:
msg = ("MATLAB command '{}' not found. "
"Please add '{}' to your PATH.").format(MATLAB, MATLAB)
raise EnvironmentError(msg)
|
83863677227436572be18e76803ee6f2ada24f8a | version.py | version.py | # -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 9, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform() | # -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 1, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform() | Change 0.9.2 to 0.1.2 (Kind of silly to have 0.9 just 3 weeks after starting...) | Change 0.9.2 to 0.1.2 (Kind of silly to have 0.9 just 3 weeks after starting...)
| Python | mit | shacknetisp/fourthevaz,shacknetisp/fourthevaz,shacknetisp/fourthevaz | # -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 9, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform()Change 0.9.2 to 0.1.2 (Kind of silly to have 0.9 just 3 weeks after starting...) | # -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 1, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform() | <commit_before># -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 9, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform()<commit_msg>Change 0.9.2 to 0.1.2 (Kind of silly to have 0.9 just 3 weeks after starting...)<commit_after> | # -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 1, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform() | # -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 9, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform()Change 0.9.2 to 0.1.2 (Kind of silly to have 0.9 just 3 weeks after starting...)# -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 1, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform() | <commit_before># -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 9, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform()<commit_msg>Change 0.9.2 to 0.1.2 (Kind of silly to have 0.9 just 3 weeks after starting...)<commit_after># -*- coding: utf-8 -*-
import platform
name = "Fourth Evaz"
version = (0, 1, 2)
source = "https://github.com/shacknetisp/fourthevaz"
def versionstr():
return "%d.%d.%d" % (version[0], version[1], version[2])
def pythonversionstr():
return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())
def systemversionstr():
return platform.platform() |
f8ccb67ad9775fa1babe79640d4db46027531046 | examples/async_step/features/steps/async_dispatch_steps.py | examples/async_step/features/steps/async_dispatch_steps.py | from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
| # -*- coding: UTF-8 -*-
# REQUIRES: Python >= 3.5
from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
| Add encoding hint (and which python version is required). | Add encoding hint (and which python version is required).
| Python | bsd-2-clause | jenisys/behave,jenisys/behave | from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
Add encoding hint (and which python version is required). | # -*- coding: UTF-8 -*-
# REQUIRES: Python >= 3.5
from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
| <commit_before>from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
<commit_msg>Add encoding hint (and which python version is required).<commit_after> | # -*- coding: UTF-8 -*-
# REQUIRES: Python >= 3.5
from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
| from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
Add encoding hint (and which python version is required).# -*- coding: UTF-8 -*-
# REQUIRES: Python >= 3.5
from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
| <commit_before>from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
<commit_msg>Add encoding hint (and which python version is required).<commit_after># -*- coding: UTF-8 -*-
# REQUIRES: Python >= 3.5
from behave import given, then, step
from behave.api.async_step import use_or_create_async_context, AsyncContext
from hamcrest import assert_that, equal_to, empty
import asyncio
@asyncio.coroutine
def async_func(param):
yield from asyncio.sleep(0.2)
return str(param).upper()
@given('I dispatch an async-call with param "{param}"')
def step_dispatch_async_call(context, param):
async_context = use_or_create_async_context(context, "async_context1")
task = async_context.loop.create_task(async_func(param))
async_context.tasks.append(task)
@then('the collected result of the async-calls is "{expected}"')
def step_collected_async_call_result_is(context, expected):
async_context = context.async_context1
done, pending = async_context.loop.run_until_complete(
asyncio.wait(async_context.tasks, loop=async_context.loop))
parts = [task.result() for task in done]
joined_result = ", ".join(sorted(parts))
assert_that(joined_result, equal_to(expected))
assert_that(pending, empty())
|
1c516e64518597404e3928d445fb3239748a4861 | performanceplatform/collector/logging_setup.py | performanceplatform/collector/logging_setup.py | from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
| from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
| Add `json_fields` parameter to set_up_logging | Add `json_fields` parameter to set_up_logging
This will allow the main function to add extra fields to JSON log
messages, for example to pass through command-line arguments.
See https://www.pivotaltracker.com/story/show/70748012
| Python | mit | alphagov/performanceplatform-collector,alphagov/performanceplatform-collector,alphagov/performanceplatform-collector | from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
Add `json_fields` parameter to set_up_logging
This will allow the main function to add extra fields to JSON log
messages, for example to pass through command-line arguments.
See https://www.pivotaltracker.com/story/show/70748012 | from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
| <commit_before>from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
<commit_msg>Add `json_fields` parameter to set_up_logging
This will allow the main function to add extra fields to JSON log
messages, for example to pass through command-line arguments.
See https://www.pivotaltracker.com/story/show/70748012<commit_after> | from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
| from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
Add `json_fields` parameter to set_up_logging
This will allow the main function to add extra fields to JSON log
messages, for example to pass through command-line arguments.
See https://www.pivotaltracker.com/story/show/70748012from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
| <commit_before>from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'), app_name))
logger.info("{0} logging started".format(app_name))
<commit_msg>Add `json_fields` parameter to set_up_logging
This will allow the main function to add extra fields to JSON log
messages, for example to pass through command-line arguments.
See https://www.pivotaltracker.com/story/show/70748012<commit_after>from logstash_formatter import LogstashFormatter
import logging
import os
import pdb
import sys
import traceback
def get_log_file_handler(path):
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
return handler
def get_json_log_handler(path, app_name, json_fields):
handler = logging.FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['collector', app_name]
formatter.defaults.update(json_fields)
handler.setFormatter(formatter)
return handler
def uncaught_exception_handler(*exc_info):
text = "".join(traceback.format_exception(*exc_info))
logging.error("Unhandled exception: %s", text)
def set_up_logging(app_name, log_level, logfile_path, json_fields=None):
sys.excepthook = uncaught_exception_handler
logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(get_log_file_handler(
os.path.join(logfile_path, 'collector.log')))
logger.addHandler(get_json_log_handler(
os.path.join(logfile_path, 'collector.log.json'),
app_name,
json_fields=json_fields if json_fields else {}))
logger.info("{0} logging started".format(app_name))
|
a102731c88f496b557dedd4024fb9b82801d134a | oauthlib/__init__.py | oauthlib/__init__.py | """
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger('oauthlib').addHandler(NullHandler())
| """
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
logging.getLogger('oauthlib').addHandler(NullHandler())
| Remove Python 2.6 compatibility code. | Remove Python 2.6 compatibility code. | Python | bsd-3-clause | idan/oauthlib,oauthlib/oauthlib | """
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger('oauthlib').addHandler(NullHandler())
Remove Python 2.6 compatibility code. | """
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
logging.getLogger('oauthlib').addHandler(NullHandler())
| <commit_before>"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger('oauthlib').addHandler(NullHandler())
<commit_msg>Remove Python 2.6 compatibility code.<commit_after> | """
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
logging.getLogger('oauthlib').addHandler(NullHandler())
| """
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger('oauthlib').addHandler(NullHandler())
Remove Python 2.6 compatibility code."""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
logging.getLogger('oauthlib').addHandler(NullHandler())
| <commit_before>"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger('oauthlib').addHandler(NullHandler())
<commit_msg>Remove Python 2.6 compatibility code.<commit_after>"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler
__author__ = 'The OAuthlib Community'
__version__ = '2.1.0'
logging.getLogger('oauthlib').addHandler(NullHandler())
|
bbba3cdedaa536e11691275abb60964b5afd2ffe | obuka/__openerp__.py | obuka/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : [],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : ['base'],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
| Add depends in our addon | Add depends in our addon
| Python | mit | gdamjan/openerp-training | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : [],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
Add depends in our addon | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : ['base'],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : [],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
<commit_msg>Add depends in our addon<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : ['base'],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : [],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
Add depends in our addon# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : ['base'],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : [],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
<commit_msg>Add depends in our addon<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "obuka",
"version" : "1.0",
"author" : "gdamjan",
"category": 'Generic Modules',
"description": "test test some description",
'website': 'http://damjan.softver.org.mk',
'init_xml': [],
"depends" : ['base'],
'update_xml': ['obuka_view.xml'],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
}
|
8a7fd251454026baf3cf7a0a1aa0300a0f3772bc | pycanvas/assignment.py | pycanvas/assignment.py | from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self): # pragma: no cover
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
| from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
| Remove no-cover from __str__ method | Remove no-cover from __str__ method
| Python | mit | ucfopen/canvasapi,ucfopen/canvasapi,ucfopen/canvasapi | from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self): # pragma: no cover
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
Remove no-cover from __str__ method | from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
| <commit_before>from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self): # pragma: no cover
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
<commit_msg>Remove no-cover from __str__ method<commit_after> | from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
| from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self): # pragma: no cover
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
Remove no-cover from __str__ methodfrom canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
| <commit_before>from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self): # pragma: no cover
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
<commit_msg>Remove no-cover from __str__ method<commit_after>from canvas_object import CanvasObject
from util import combine_kwargs
class Assignment(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def delete(self):
"""
Delete this assignment.
:calls: `DELETE /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments.destroy>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'DELETE',
'courses/%s/assignments/%s' % (self.course_id, self.id),
)
return Assignment(self._requester, response.json())
def edit(self, **kwargs):
"""
Modify this assignment.
:calls: `PUT /api/v1/courses/:course_id/assignments/:id \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.update>`_
:rtype: :class:`pycanvas.assignment.Assignment`
"""
response = self._requester.request(
'PUT',
'courses/%s/assignments/%s' % (self.course_id, self.id),
**combine_kwargs(**kwargs)
)
if 'name' in response.json():
super(Assignment, self).set_attributes(response.json())
return Assignment(self._requester, response.json())
|
75fd199b239c23d7396bd3b5803d3c6007361b5a | test/repsitory_test.py | test/repsitory_test.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import io
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertEqual(ModuleNotFoundError, cm.exception.exception.__class__)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertTrue(issubclass(cm.exception.exception.__class__, ImportError))
if __name__ == '__main__':
unittest.main()
| Fix repository import test for Python < 3.6 | Fix repository import test for Python < 3.6
Check for broader range of exceptions.
| Python | bsd-2-clause | dergraaf/library-builder,dergraaf/library-builder | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import io
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertEqual(ModuleNotFoundError, cm.exception.exception.__class__)
if __name__ == '__main__':
unittest.main()
Fix repository import test for Python < 3.6
Check for broader range of exceptions. | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertTrue(issubclass(cm.exception.exception.__class__, ImportError))
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import io
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertEqual(ModuleNotFoundError, cm.exception.exception.__class__)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix repository import test for Python < 3.6
Check for broader range of exceptions.<commit_after> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertTrue(issubclass(cm.exception.exception.__class__, ImportError))
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import io
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertEqual(ModuleNotFoundError, cm.exception.exception.__class__)
if __name__ == '__main__':
unittest.main()
Fix repository import test for Python < 3.6
Check for broader range of exceptions.#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertTrue(issubclass(cm.exception.exception.__class__, ImportError))
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import io
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertEqual(ModuleNotFoundError, cm.exception.exception.__class__)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix repository import test for Python < 3.6
Check for broader range of exceptions.<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import sys
import unittest
# Hack to support the usage of `coverage`
sys.path.append(os.path.abspath("."))
import lbuild
class RepositoryTest(unittest.TestCase):
def _get_path(self, filename):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),
"resources", "repository", filename)
def setUp(self):
self.parser = lbuild.parser.Parser()
def test_should_generate_exception_on_import_error(self):
with self.assertRaises(lbuild.exception.BlobForwardException) as cm:
self.parser.parse_repository(self._get_path("invalid_import.lb"))
self.assertTrue(issubclass(cm.exception.exception.__class__, ImportError))
if __name__ == '__main__':
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.