repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
listlengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
listlengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_down
def do_down(self, arg):
    """d(own) [count]
    Move the current frame count (default one) levels down in the
    stack trace (to a newer frame).
    """
    # Already at the newest frame: nothing below us.
    if self.curindex + 1 == len(self.stack):
        self.error('Newest frame')
        return
    try:
        count = int(arg or 1)
    except ValueError:
        self.error('Invalid frame count (%s)' % arg)
        return
    # A negative count jumps straight to the newest frame.
    newest = len(self.stack) - 1
    target = newest if count < 0 else min(newest, self.curindex + count)
    self._select_frame(target)
python
def do_down(self, arg): """d(own) [count] Move the current frame count (default one) levels down in the stack trace (to a newer frame). """ if self.curindex + 1 == len(self.stack): self.error('Newest frame') return try: count = int(arg or 1) except ValueError: self.error('Invalid frame count (%s)' % arg) return if count < 0: newframe = len(self.stack) - 1 else: newframe = min(len(self.stack) - 1, self.curindex + count) self._select_frame(newframe)
[ "def", "do_down", "(", "self", ",", "arg", ")", ":", "if", "self", ".", "curindex", "+", "1", "==", "len", "(", "self", ".", "stack", ")", ":", "self", ".", "error", "(", "'Newest frame'", ")", "return", "try", ":", "count", "=", "int", "(", "arg", "or", "1", ")", "except", "ValueError", ":", "self", ".", "error", "(", "'Invalid frame count (%s)'", "%", "arg", ")", "return", "if", "count", "<", "0", ":", "newframe", "=", "len", "(", "self", ".", "stack", ")", "-", "1", "else", ":", "newframe", "=", "min", "(", "len", "(", "self", ".", "stack", ")", "-", "1", ",", "self", ".", "curindex", "+", "count", ")", "self", ".", "_select_frame", "(", "newframe", ")" ]
d(own) [count] Move the current frame count (default one) levels down in the stack trace (to a newer frame).
[ "d", "(", "own", ")", "[", "count", "]", "Move", "the", "current", "frame", "count", "(", "default", "one", ")", "levels", "down", "in", "the", "stack", "trace", "(", "to", "a", "newer", "frame", ")", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1191-L1208
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_until
def do_until(self, arg):
    """unt(il) [lineno]
    Without argument, continue execution until the line with a
    number greater than the current one is reached.  With a line
    number, continue execution until a line with a number greater
    or equal to that is reached.  In both cases, also stop when
    the current frame returns.
    """
    lineno = None
    if arg:
        try:
            lineno = int(arg)
        except ValueError:
            self.error('Error in argument: %r' % arg)
            return
        # Refuse targets at or before the current line.
        if lineno <= self.curframe.f_lineno:
            self.error('"until" line number is smaller than current '
                       'line number')
            return
    self.set_until(self.curframe, lineno)
    self.set_sigint_handler()
    return 1
python
def do_until(self, arg): """unt(il) [lineno] Without argument, continue execution until the line with a number greater than the current one is reached. With a line number, continue execution until a line with a number greater or equal to that is reached. In both cases, also stop when the current frame returns. """ if arg: try: lineno = int(arg) except ValueError: self.error('Error in argument: %r' % arg) return if lineno <= self.curframe.f_lineno: self.error('"until" line number is smaller than current ' 'line number') return else: lineno = None self.set_until(self.curframe, lineno) self.set_sigint_handler() return 1
[ "def", "do_until", "(", "self", ",", "arg", ")", ":", "if", "arg", ":", "try", ":", "lineno", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "self", ".", "error", "(", "'Error in argument: %r'", "%", "arg", ")", "return", "if", "lineno", "<=", "self", ".", "curframe", ".", "f_lineno", ":", "self", ".", "error", "(", "'\"until\" line number is smaller than current '", "'line number'", ")", "return", "else", ":", "lineno", "=", "None", "self", ".", "set_until", "(", "self", ".", "curframe", ",", "lineno", ")", "self", ".", "set_sigint_handler", "(", ")", "return", "1" ]
unt(il) [lineno] Without argument, continue execution until the line with a number greater than the current one is reached. With a line number, continue execution until a line with a number greater or equal to that is reached. In both cases, also stop when the current frame returns.
[ "unt", "(", "il", ")", "[", "lineno", "]", "Without", "argument", "continue", "execution", "until", "the", "line", "with", "a", "number", "greater", "than", "the", "current", "one", "is", "reached", ".", "With", "a", "line", "number", "continue", "execution", "until", "a", "line", "with", "a", "number", "greater", "or", "equal", "to", "that", "is", "reached", ".", "In", "both", "cases", "also", "stop", "when", "the", "current", "frame", "returns", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1211-L1233
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_run
def do_run(self, arg):
    """run [args...]
    Restart the debugged python program. If a string is supplied
    it is splitted with "shlex", and the result is used as the new
    sys.argv.  History, breakpoints, actions and debugger options
    are preserved.  "restart" is an alias for "run".
    """
    if arg:
        # Keep the original program name, replace the remaining argv.
        sys.argv = sys.argv[0:1] + shlex.split(arg)
    # Restart is caught in the main debugger loop.
    raise Restart
python
def do_run(self, arg): """run [args...] Restart the debugged python program. If a string is supplied it is splitted with "shlex", and the result is used as the new sys.argv. History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run". """ if arg: argv0 = sys.argv[0:1] sys.argv = shlex.split(arg) sys.argv[:0] = argv0 # this is caught in the main debugger loop raise Restart
[ "def", "do_run", "(", "self", ",", "arg", ")", ":", "if", "arg", ":", "argv0", "=", "sys", ".", "argv", "[", "0", ":", "1", "]", "sys", ".", "argv", "=", "shlex", ".", "split", "(", "arg", ")", "sys", ".", "argv", "[", ":", "0", "]", "=", "argv0", "# this is caught in the main debugger loop", "raise", "Restart" ]
run [args...] Restart the debugged python program. If a string is supplied it is splitted with "shlex", and the result is used as the new sys.argv. History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run".
[ "run", "[", "args", "...", "]", "Restart", "the", "debugged", "python", "program", ".", "If", "a", "string", "is", "supplied", "it", "is", "splitted", "with", "shlex", "and", "the", "result", "is", "used", "as", "the", "new", "sys", ".", "argv", ".", "History", "breakpoints", "actions", "and", "debugger", "options", "are", "preserved", ".", "restart", "is", "an", "alias", "for", "run", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1257-L1269
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_jump
def do_jump(self, arg):
    """j(ump) lineno
    Set the next line that will be executed.  Only available in
    the bottom-most frame.  This lets you jump back and execute
    code again, or jump forward to skip code that you don't want
    to run.

    It should be noted that not all jumps are allowed -- for
    instance it is not possible to jump into the middle of a
    for loop or out of a finally clause.
    """
    if self.curindex + 1 != len(self.stack):
        self.error('You can only jump within the bottom frame')
        return
    try:
        target = int(arg)
    except ValueError:
        self.error("The 'jump' command requires a line number")
        return
    try:
        # Do the jump, fix up our copy of the stack, and display
        # the new position.
        self.curframe.f_lineno = target
        frame = self.stack[self.curindex][0]
        self.stack[self.curindex] = (frame, target)
        self.print_stack_entry(self.stack[self.curindex])
    except ValueError as e:
        # Assigning f_lineno raises ValueError on a disallowed jump.
        self.error('Jump failed: %s' % e)
python
def do_jump(self, arg): """j(ump) lineno Set the next line that will be executed. Only available in the bottom-most frame. This lets you jump back and execute code again, or jump forward to skip code that you don't want to run. It should be noted that not all jumps are allowed -- for instance it is not possible to jump into the middle of a for loop or out of a finally clause. """ if self.curindex + 1 != len(self.stack): self.error('You can only jump within the bottom frame') return try: arg = int(arg) except ValueError: self.error("The 'jump' command requires a line number") else: try: # Do the jump, fix up our copy of the stack, and display the # new position self.curframe.f_lineno = arg self.stack[self.curindex] = self.stack[self.curindex][0], arg self.print_stack_entry(self.stack[self.curindex]) except ValueError as e: self.error('Jump failed: %s' % e)
[ "def", "do_jump", "(", "self", ",", "arg", ")", ":", "if", "self", ".", "curindex", "+", "1", "!=", "len", "(", "self", ".", "stack", ")", ":", "self", ".", "error", "(", "'You can only jump within the bottom frame'", ")", "return", "try", ":", "arg", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "self", ".", "error", "(", "\"The 'jump' command requires a line number\"", ")", "else", ":", "try", ":", "# Do the jump, fix up our copy of the stack, and display the", "# new position", "self", ".", "curframe", ".", "f_lineno", "=", "arg", "self", ".", "stack", "[", "self", ".", "curindex", "]", "=", "self", ".", "stack", "[", "self", ".", "curindex", "]", "[", "0", "]", ",", "arg", "self", ".", "print_stack_entry", "(", "self", ".", "stack", "[", "self", ".", "curindex", "]", ")", "except", "ValueError", "as", "e", ":", "self", ".", "error", "(", "'Jump failed: %s'", "%", "e", ")" ]
j(ump) lineno Set the next line that will be executed. Only available in the bottom-most frame. This lets you jump back and execute code again, or jump forward to skip code that you don't want to run. It should be noted that not all jumps are allowed -- for instance it is not possible to jump into the middle of a for loop or out of a finally clause.
[ "j", "(", "ump", ")", "lineno", "Set", "the", "next", "line", "that", "will", "be", "executed", ".", "Only", "available", "in", "the", "bottom", "-", "most", "frame", ".", "This", "lets", "you", "jump", "back", "and", "execute", "code", "again", "or", "jump", "forward", "to", "skip", "code", "that", "you", "don", "t", "want", "to", "run", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1291-L1317
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_debug
def do_debug(self, arg):
    """debug code
    Enter a recursive debugger that steps through the code
    argument (which is an arbitrary expression or statement to be
    executed in the current environment).
    """
    # Suspend tracing of the outer debugger while the nested one runs.
    self.settrace(False)
    frame_globals = self.curframe.f_globals
    frame_locals = self.get_locals(self.curframe)
    p = Pdb(self.completekey, self.stdin, self.stdout, debug=True)
    p.prompt = "(%s) " % self.prompt.strip()
    self.message("ENTERING RECURSIVE DEBUGGER")
    sys.call_tracing(p.run, (arg, frame_globals, frame_locals))
    self.message("LEAVING RECURSIVE DEBUGGER")
    self.settrace(True)
    # Let an empty command in the outer debugger repeat the nested
    # debugger's last command.
    self.lastcmd = p.lastcmd
python
def do_debug(self, arg): """debug code Enter a recursive debugger that steps through the code argument (which is an arbitrary expression or statement to be executed in the current environment). """ self.settrace(False) globals = self.curframe.f_globals locals = self.get_locals(self.curframe) p = Pdb(self.completekey, self.stdin, self.stdout, debug=True) p.prompt = "(%s) " % self.prompt.strip() self.message("ENTERING RECURSIVE DEBUGGER") sys.call_tracing(p.run, (arg, globals, locals)) self.message("LEAVING RECURSIVE DEBUGGER") self.settrace(True) self.lastcmd = p.lastcmd
[ "def", "do_debug", "(", "self", ",", "arg", ")", ":", "self", ".", "settrace", "(", "False", ")", "globals", "=", "self", ".", "curframe", ".", "f_globals", "locals", "=", "self", ".", "get_locals", "(", "self", ".", "curframe", ")", "p", "=", "Pdb", "(", "self", ".", "completekey", ",", "self", ".", "stdin", ",", "self", ".", "stdout", ",", "debug", "=", "True", ")", "p", ".", "prompt", "=", "\"(%s) \"", "%", "self", ".", "prompt", ".", "strip", "(", ")", "self", ".", "message", "(", "\"ENTERING RECURSIVE DEBUGGER\"", ")", "sys", ".", "call_tracing", "(", "p", ".", "run", ",", "(", "arg", ",", "globals", ",", "locals", ")", ")", "self", ".", "message", "(", "\"LEAVING RECURSIVE DEBUGGER\"", ")", "self", ".", "settrace", "(", "True", ")", "self", ".", "lastcmd", "=", "p", ".", "lastcmd" ]
debug code Enter a recursive debugger that steps through the code argument (which is an arbitrary expression or statement to be executed in the current environment).
[ "debug", "code", "Enter", "a", "recursive", "debugger", "that", "steps", "through", "the", "code", "argument", "(", "which", "is", "an", "arbitrary", "expression", "or", "statement", "to", "be", "executed", "in", "the", "current", "environment", ")", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1320-L1335
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_quit
def do_quit(self, arg):
    """q(uit)\nexit
    Quit from the debugger. The program being executed is aborted.
    """
    # A remote session is detached instead of aborted, unless this is
    # a recursive 'debug' instance.
    remote = isinstance(self.stdin, RemoteSocket)
    if remote and not self.is_debug_instance:
        return self.do_detach(arg)
    self._user_requested_quit = True
    self.set_quit()
    return 1
python
def do_quit(self, arg): """q(uit)\nexit Quit from the debugger. The program being executed is aborted. """ if isinstance(self.stdin, RemoteSocket) and not self.is_debug_instance: return self.do_detach(arg) self._user_requested_quit = True self.set_quit() return 1
[ "def", "do_quit", "(", "self", ",", "arg", ")", ":", "if", "isinstance", "(", "self", ".", "stdin", ",", "RemoteSocket", ")", "and", "not", "self", ".", "is_debug_instance", ":", "return", "self", ".", "do_detach", "(", "arg", ")", "self", ".", "_user_requested_quit", "=", "True", "self", ".", "set_quit", "(", ")", "return", "1" ]
q(uit)\nexit Quit from the debugger. The program being executed is aborted.
[ "q", "(", "uit", ")", "\\", "nexit", "Quit", "from", "the", "debugger", ".", "The", "program", "being", "executed", "is", "aborted", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1349-L1357
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_args
def do_args(self, arg):
    """a(rgs)
    Print the argument list of the current function.
    """
    co = self.curframe.f_code
    # 'frame_locals' instead of the original 'dict', which shadowed
    # the builtin.
    frame_locals = self.get_locals(self.curframe)
    # Include keyword-only arguments (Python 3); the original counted
    # only co_argcount, so keyword-only arguments were never printed.
    n = co.co_argcount + getattr(co, 'co_kwonlyargcount', 0)
    if co.co_flags & 4:     # CO_VARARGS: a *args parameter
        n = n + 1
    if co.co_flags & 8:     # CO_VARKEYWORDS: a **kwargs parameter
        n = n + 1
    for i in range(n):
        name = co.co_varnames[i]
        if name in frame_locals:
            self.message('%s = %s' % (name,
                                      bdb.safe_repr(frame_locals[name])))
        else:
            self.message('%s = *** undefined ***' % (name,))
python
def do_args(self, arg): """a(rgs) Print the argument list of the current function. """ co = self.curframe.f_code dict = self.get_locals(self.curframe) n = co.co_argcount if co.co_flags & 4: n = n+1 if co.co_flags & 8: n = n+1 for i in range(n): name = co.co_varnames[i] if name in dict: self.message('%s = %s' % (name, bdb.safe_repr(dict[name]))) else: self.message('%s = *** undefined ***' % (name,))
[ "def", "do_args", "(", "self", ",", "arg", ")", ":", "co", "=", "self", ".", "curframe", ".", "f_code", "dict", "=", "self", ".", "get_locals", "(", "self", ".", "curframe", ")", "n", "=", "co", ".", "co_argcount", "if", "co", ".", "co_flags", "&", "4", ":", "n", "=", "n", "+", "1", "if", "co", ".", "co_flags", "&", "8", ":", "n", "=", "n", "+", "1", "for", "i", "in", "range", "(", "n", ")", ":", "name", "=", "co", ".", "co_varnames", "[", "i", "]", "if", "name", "in", "dict", ":", "self", ".", "message", "(", "'%s = %s'", "%", "(", "name", ",", "bdb", ".", "safe_repr", "(", "dict", "[", "name", "]", ")", ")", ")", "else", ":", "self", ".", "message", "(", "'%s = *** undefined ***'", "%", "(", "name", ",", ")", ")" ]
a(rgs) Print the argument list of the current function.
[ "a", "(", "rgs", ")", "Print", "the", "argument", "list", "of", "the", "current", "function", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1369-L1383
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_retval
def do_retval(self, arg):
    """retval
    Print the return value for the last return of a function.
    """
    frame_locals = self.get_locals(self.curframe)
    try:
        value = frame_locals['__return__']
    except KeyError:
        self.error('Not yet returned!')
    else:
        self.message(bdb.safe_repr(value))
python
def do_retval(self, arg): """retval Print the return value for the last return of a function. """ locals = self.get_locals(self.curframe) if '__return__' in locals: self.message(bdb.safe_repr(locals['__return__'])) else: self.error('Not yet returned!')
[ "def", "do_retval", "(", "self", ",", "arg", ")", ":", "locals", "=", "self", ".", "get_locals", "(", "self", ".", "curframe", ")", "if", "'__return__'", "in", "locals", ":", "self", ".", "message", "(", "bdb", ".", "safe_repr", "(", "locals", "[", "'__return__'", "]", ")", ")", "else", ":", "self", ".", "error", "(", "'Not yet returned!'", ")" ]
retval Print the return value for the last return of a function.
[ "retval", "Print", "the", "return", "value", "for", "the", "last", "return", "of", "a", "function", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1386-L1394
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_p
def do_p(self, arg):
    """p expression
    Print the value of the expression.
    """
    try:
        value = self._getval(arg)
        self.message(bdb.safe_repr(value))
    except Exception:
        # _getval() has already reported the error.
        pass
python
def do_p(self, arg): """p expression Print the value of the expression. """ try: self.message(bdb.safe_repr(self._getval(arg))) except Exception: pass
[ "def", "do_p", "(", "self", ",", "arg", ")", ":", "try", ":", "self", ".", "message", "(", "bdb", ".", "safe_repr", "(", "self", ".", "_getval", "(", "arg", ")", ")", ")", "except", "Exception", ":", "pass" ]
p expression Print the value of the expression.
[ "p", "expression", "Print", "the", "value", "of", "the", "expression", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1418-L1425
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_pp
def do_pp(self, arg):
    """pp expression
    Pretty-print the value of the expression.
    """
    obj = self._getval(arg)
    try:
        repr(obj)
    except Exception:
        # repr() failed: fall back to a crash-proof representation.
        self.message(bdb.safe_repr(obj))
        return
    self.message(pprint.pformat(obj))
python
def do_pp(self, arg): """pp expression Pretty-print the value of the expression. """ obj = self._getval(arg) try: repr(obj) except Exception: self.message(bdb.safe_repr(obj)) else: self.message(pprint.pformat(obj))
[ "def", "do_pp", "(", "self", ",", "arg", ")", ":", "obj", "=", "self", ".", "_getval", "(", "arg", ")", "try", ":", "repr", "(", "obj", ")", "except", "Exception", ":", "self", ".", "message", "(", "bdb", ".", "safe_repr", "(", "obj", ")", ")", "else", ":", "self", ".", "message", "(", "pprint", ".", "pformat", "(", "obj", ")", ")" ]
pp expression Pretty-print the value of the expression.
[ "pp", "expression", "Pretty", "-", "print", "the", "value", "of", "the", "expression", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1427-L1437
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_list
def do_list(self, arg):
    """l(ist) [first [,last] | .]

    List source code for the current file.  Without arguments,
    list 11 lines around the current line or continue the previous
    listing.  With . as argument, list 11 lines around the current
    line.  With one argument, list 11 lines starting at that line.
    With two arguments, list the given range; if the second
    argument is less than the first, it is a count.

    The current line in the current frame is indicated by "->".
    If an exception is being debugged, the line where the
    exception was originally raised or propagated is indicated by
    ">>", if it differs from the current line.
    """
    self.lastcmd = 'list'
    last = None
    if arg and arg != '.':
        try:
            if ',' in arg:
                left, right = arg.split(',')
                first = int(left.strip())
                last = int(right.strip())
                if last < first:
                    # The second argument is a count, not a line number.
                    last = first + last
            else:
                first = max(1, int(arg.strip()) - 5)
        except ValueError:
            self.error('Error in argument: %r' % arg)
            return
    elif self.lineno is None or arg == '.':
        first = max(1, self.curframe.f_lineno - 5)
    else:
        # No argument: continue the previous listing.
        first = self.lineno + 1
    if last is None:
        last = first + 10
    filename = self.curframe.f_code.co_filename
    breaklist = self.get_file_breaks(filename)
    try:
        lines = linecache.getlines(filename, self.curframe.f_globals)
        self._print_lines(lines[first-1:last], first, breaklist,
                          self.curframe)
        self.lineno = min(last, len(lines))
        if len(lines) < last:
            self.message('[EOF]')
    except KeyboardInterrupt:
        pass
python
def do_list(self, arg): """l(ist) [first [,last] | .] List source code for the current file. Without arguments, list 11 lines around the current line or continue the previous listing. With . as argument, list 11 lines around the current line. With one argument, list 11 lines starting at that line. With two arguments, list the given range; if the second argument is less than the first, it is a count. The current line in the current frame is indicated by "->". If an exception is being debugged, the line where the exception was originally raised or propagated is indicated by ">>", if it differs from the current line. """ self.lastcmd = 'list' last = None if arg and arg != '.': try: if ',' in arg: first, last = arg.split(',') first = int(first.strip()) last = int(last.strip()) if last < first: # assume it's a count last = first + last else: first = int(arg.strip()) first = max(1, first - 5) except ValueError: self.error('Error in argument: %r' % arg) return elif self.lineno is None or arg == '.': first = max(1, self.curframe.f_lineno - 5) else: first = self.lineno + 1 if last is None: last = first + 10 filename = self.curframe.f_code.co_filename breaklist = self.get_file_breaks(filename) try: lines = linecache.getlines(filename, self.curframe.f_globals) self._print_lines(lines[first-1:last], first, breaklist, self.curframe) self.lineno = min(last, len(lines)) if len(lines) < last: self.message('[EOF]') except KeyboardInterrupt: pass
[ "def", "do_list", "(", "self", ",", "arg", ")", ":", "self", ".", "lastcmd", "=", "'list'", "last", "=", "None", "if", "arg", "and", "arg", "!=", "'.'", ":", "try", ":", "if", "','", "in", "arg", ":", "first", ",", "last", "=", "arg", ".", "split", "(", "','", ")", "first", "=", "int", "(", "first", ".", "strip", "(", ")", ")", "last", "=", "int", "(", "last", ".", "strip", "(", ")", ")", "if", "last", "<", "first", ":", "# assume it's a count", "last", "=", "first", "+", "last", "else", ":", "first", "=", "int", "(", "arg", ".", "strip", "(", ")", ")", "first", "=", "max", "(", "1", ",", "first", "-", "5", ")", "except", "ValueError", ":", "self", ".", "error", "(", "'Error in argument: %r'", "%", "arg", ")", "return", "elif", "self", ".", "lineno", "is", "None", "or", "arg", "==", "'.'", ":", "first", "=", "max", "(", "1", ",", "self", ".", "curframe", ".", "f_lineno", "-", "5", ")", "else", ":", "first", "=", "self", ".", "lineno", "+", "1", "if", "last", "is", "None", ":", "last", "=", "first", "+", "10", "filename", "=", "self", ".", "curframe", ".", "f_code", ".", "co_filename", "breaklist", "=", "self", ".", "get_file_breaks", "(", "filename", ")", "try", ":", "lines", "=", "linecache", ".", "getlines", "(", "filename", ",", "self", ".", "curframe", ".", "f_globals", ")", "self", ".", "_print_lines", "(", "lines", "[", "first", "-", "1", ":", "last", "]", ",", "first", ",", "breaklist", ",", "self", ".", "curframe", ")", "self", ".", "lineno", "=", "min", "(", "last", ",", "len", "(", "lines", ")", ")", "if", "len", "(", "lines", ")", "<", "last", ":", "self", ".", "message", "(", "'[EOF]'", ")", "except", "KeyboardInterrupt", ":", "pass" ]
l(ist) [first [,last] | .] List source code for the current file. Without arguments, list 11 lines around the current line or continue the previous listing. With . as argument, list 11 lines around the current line. With one argument, list 11 lines starting at that line. With two arguments, list the given range; if the second argument is less than the first, it is a count. The current line in the current frame is indicated by "->". If an exception is being debugged, the line where the exception was originally raised or propagated is indicated by ">>", if it differs from the current line.
[ "l", "(", "ist", ")", "[", "first", "[", "last", "]", "|", ".", "]" ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1443-L1491
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_longlist
def do_longlist(self, arg):
    """longlist | ll
    List the whole source code for the current function or frame.
    """
    frame = self.curframe
    breaklist = self.get_file_breaks(frame.f_code.co_filename)
    try:
        lines, first = getsourcelines(frame, self.get_locals(frame))
    except IOError as err:
        self.error(err)
        return
    self._print_lines(lines, first, breaklist, frame)
python
def do_longlist(self, arg): """longlist | ll List the whole source code for the current function or frame. """ filename = self.curframe.f_code.co_filename breaklist = self.get_file_breaks(filename) try: lines, lineno = getsourcelines(self.curframe, self.get_locals(self.curframe)) except IOError as err: self.error(err) return self._print_lines(lines, lineno, breaklist, self.curframe)
[ "def", "do_longlist", "(", "self", ",", "arg", ")", ":", "filename", "=", "self", ".", "curframe", ".", "f_code", ".", "co_filename", "breaklist", "=", "self", ".", "get_file_breaks", "(", "filename", ")", "try", ":", "lines", ",", "lineno", "=", "getsourcelines", "(", "self", ".", "curframe", ",", "self", ".", "get_locals", "(", "self", ".", "curframe", ")", ")", "except", "IOError", "as", "err", ":", "self", ".", "error", "(", "err", ")", "return", "self", ".", "_print_lines", "(", "lines", ",", "lineno", ",", "breaklist", ",", "self", ".", "curframe", ")" ]
longlist | ll List the whole source code for the current function or frame.
[ "longlist", "|", "ll", "List", "the", "whole", "source", "code", "for", "the", "current", "function", "or", "frame", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1494-L1506
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_source
def do_source(self, arg):
    """source expression
    Try to get source code for the given object and display it.
    """
    try:
        obj = self._getval(arg)
    except Exception:
        # _getval() has already reported the error.
        return
    try:
        lines, first = getsourcelines(obj, self.get_locals(self.curframe))
    except (IOError, TypeError) as err:
        self.error(err)
    else:
        self._print_lines(lines, first)
python
def do_source(self, arg): """source expression Try to get source code for the given object and display it. """ try: obj = self._getval(arg) except Exception: return try: lines, lineno = getsourcelines(obj, self.get_locals(self.curframe)) except (IOError, TypeError) as err: self.error(err) return self._print_lines(lines, lineno)
[ "def", "do_source", "(", "self", ",", "arg", ")", ":", "try", ":", "obj", "=", "self", ".", "_getval", "(", "arg", ")", "except", "Exception", ":", "return", "try", ":", "lines", ",", "lineno", "=", "getsourcelines", "(", "obj", ",", "self", ".", "get_locals", "(", "self", ".", "curframe", ")", ")", "except", "(", "IOError", ",", "TypeError", ")", "as", "err", ":", "self", ".", "error", "(", "err", ")", "return", "self", ".", "_print_lines", "(", "lines", ",", "lineno", ")" ]
source expression Try to get source code for the given object and display it.
[ "source", "expression", "Try", "to", "get", "source", "code", "for", "the", "given", "object", "and", "display", "it", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1509-L1522
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb._print_lines
def _print_lines(self, lines, start, breaks=(), frame=None): """Print a range of lines.""" if frame: current_lineno = frame.f_lineno exc_lineno = self.tb_lineno.get(frame, -1) else: current_lineno = exc_lineno = -1 for lineno, line in enumerate(lines, start): s = str(lineno).rjust(3) if len(s) < 4: s += ' ' if lineno in breaks: s += 'B' else: s += ' ' if lineno == current_lineno: s += '->' elif lineno == exc_lineno: s += '>>' self.message(s + '\t' + line.rstrip())
python
def _print_lines(self, lines, start, breaks=(), frame=None): """Print a range of lines.""" if frame: current_lineno = frame.f_lineno exc_lineno = self.tb_lineno.get(frame, -1) else: current_lineno = exc_lineno = -1 for lineno, line in enumerate(lines, start): s = str(lineno).rjust(3) if len(s) < 4: s += ' ' if lineno in breaks: s += 'B' else: s += ' ' if lineno == current_lineno: s += '->' elif lineno == exc_lineno: s += '>>' self.message(s + '\t' + line.rstrip())
[ "def", "_print_lines", "(", "self", ",", "lines", ",", "start", ",", "breaks", "=", "(", ")", ",", "frame", "=", "None", ")", ":", "if", "frame", ":", "current_lineno", "=", "frame", ".", "f_lineno", "exc_lineno", "=", "self", ".", "tb_lineno", ".", "get", "(", "frame", ",", "-", "1", ")", "else", ":", "current_lineno", "=", "exc_lineno", "=", "-", "1", "for", "lineno", ",", "line", "in", "enumerate", "(", "lines", ",", "start", ")", ":", "s", "=", "str", "(", "lineno", ")", ".", "rjust", "(", "3", ")", "if", "len", "(", "s", ")", "<", "4", ":", "s", "+=", "' '", "if", "lineno", "in", "breaks", ":", "s", "+=", "'B'", "else", ":", "s", "+=", "' '", "if", "lineno", "==", "current_lineno", ":", "s", "+=", "'->'", "elif", "lineno", "==", "exc_lineno", ":", "s", "+=", "'>>'", "self", ".", "message", "(", "s", "+", "'\\t'", "+", "line", ".", "rstrip", "(", ")", ")" ]
Print a range of lines.
[ "Print", "a", "range", "of", "lines", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1526-L1545
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_whatis
def do_whatis(self, arg):
    """whatis arg
    Print the type of the argument.
    """
    try:
        value = self._getval(arg)
    except Exception:
        # _getval() already printed the error.
        return
    # Plain function?
    code = getattr(value, '__code__', None)
    if code:
        self.message('Function %s' % code.co_name)
        return
    # Instance method?
    code = getattr(getattr(value, '__func__', None), '__code__', None)
    if code:
        self.message('Method %s' % code.co_name)
        return
    # Class?
    if value.__class__ is type:
        self.message('Class %s.%s' % (value.__module__, value.__name__))
        return
    # None of the above...
    self.message(type(value))
python
def do_whatis(self, arg): """whatis arg Print the type of the argument. """ try: value = self._getval(arg) except Exception: # _getval() already printed the error return code = None # Is it a function? try: code = value.__code__ except Exception: pass if code: self.message('Function %s' % code.co_name) return # Is it an instance method? try: code = value.__func__.__code__ except Exception: pass if code: self.message('Method %s' % code.co_name) return # Is it a class? if value.__class__ is type: self.message('Class %s.%s' % (value.__module__, value.__name__)) return # None of the above... self.message(type(value))
[ "def", "do_whatis", "(", "self", ",", "arg", ")", ":", "try", ":", "value", "=", "self", ".", "_getval", "(", "arg", ")", "except", "Exception", ":", "# _getval() already printed the error", "return", "code", "=", "None", "# Is it a function?", "try", ":", "code", "=", "value", ".", "__code__", "except", "Exception", ":", "pass", "if", "code", ":", "self", ".", "message", "(", "'Function %s'", "%", "code", ".", "co_name", ")", "return", "# Is it an instance method?", "try", ":", "code", "=", "value", ".", "__func__", ".", "__code__", "except", "Exception", ":", "pass", "if", "code", ":", "self", ".", "message", "(", "'Method %s'", "%", "code", ".", "co_name", ")", "return", "# Is it a class?", "if", "value", ".", "__class__", "is", "type", ":", "self", ".", "message", "(", "'Class %s.%s'", "%", "(", "value", ".", "__module__", ",", "value", ".", "__name__", ")", ")", "return", "# None of the above...", "self", ".", "message", "(", "type", "(", "value", ")", ")" ]
whatis arg Print the type of the argument.
[ "whatis", "arg", "Print", "the", "type", "of", "the", "argument", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1547-L1578
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_display
def do_display(self, arg): """display [expression] Display the value of the expression if it changed, each time execution stops in the current frame. Without expression, list all display expressions for the current frame. """ if not arg: self.message('Currently displaying:') for item in self.displaying.get(self.curframe, {}).items(): self.message('%s: %s' % bdb.safe_repr(item)) else: val = self._getval_except(arg) self.displaying.setdefault(self.curframe, {})[arg] = val self.message('display %s: %s' % (arg, bdb.safe_repr(val)))
python
def do_display(self, arg): """display [expression] Display the value of the expression if it changed, each time execution stops in the current frame. Without expression, list all display expressions for the current frame. """ if not arg: self.message('Currently displaying:') for item in self.displaying.get(self.curframe, {}).items(): self.message('%s: %s' % bdb.safe_repr(item)) else: val = self._getval_except(arg) self.displaying.setdefault(self.curframe, {})[arg] = val self.message('display %s: %s' % (arg, bdb.safe_repr(val)))
[ "def", "do_display", "(", "self", ",", "arg", ")", ":", "if", "not", "arg", ":", "self", ".", "message", "(", "'Currently displaying:'", ")", "for", "item", "in", "self", ".", "displaying", ".", "get", "(", "self", ".", "curframe", ",", "{", "}", ")", ".", "items", "(", ")", ":", "self", ".", "message", "(", "'%s: %s'", "%", "bdb", ".", "safe_repr", "(", "item", ")", ")", "else", ":", "val", "=", "self", ".", "_getval_except", "(", "arg", ")", "self", ".", "displaying", ".", "setdefault", "(", "self", ".", "curframe", ",", "{", "}", ")", "[", "arg", "]", "=", "val", "self", ".", "message", "(", "'display %s: %s'", "%", "(", "arg", ",", "bdb", ".", "safe_repr", "(", "val", ")", ")", ")" ]
display [expression] Display the value of the expression if it changed, each time execution stops in the current frame. Without expression, list all display expressions for the current frame.
[ "display", "[", "expression", "]" ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1582-L1597
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_undisplay
def do_undisplay(self, arg): """undisplay [expression] Do not display the expression any more in the current frame. Without expression, clear all display expressions for the current frame. """ if arg: try: del self.displaying.get(self.curframe, {})[arg] except KeyError: self.error('not displaying %s' % arg) else: self.displaying.pop(self.curframe, None)
python
def do_undisplay(self, arg): """undisplay [expression] Do not display the expression any more in the current frame. Without expression, clear all display expressions for the current frame. """ if arg: try: del self.displaying.get(self.curframe, {})[arg] except KeyError: self.error('not displaying %s' % arg) else: self.displaying.pop(self.curframe, None)
[ "def", "do_undisplay", "(", "self", ",", "arg", ")", ":", "if", "arg", ":", "try", ":", "del", "self", ".", "displaying", ".", "get", "(", "self", ".", "curframe", ",", "{", "}", ")", "[", "arg", "]", "except", "KeyError", ":", "self", ".", "error", "(", "'not displaying %s'", "%", "arg", ")", "else", ":", "self", ".", "displaying", ".", "pop", "(", "self", ".", "curframe", ",", "None", ")" ]
undisplay [expression] Do not display the expression any more in the current frame. Without expression, clear all display expressions for the current frame.
[ "undisplay", "[", "expression", "]" ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1601-L1614
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_interact
def do_interact(self, arg): """interact Start an interative interpreter whose global namespace contains all the (global and local) names found in the current scope. """ def readfunc(prompt): self.stdout.write(prompt) self.stdout.flush() line = self.stdin.readline() line = line.rstrip('\r\n') if line == 'EOF': raise EOFError return line ns = self.curframe.f_globals.copy() ns.update(self.get_locals(self.curframe)) if isinstance(self.stdin, RemoteSocket): # Main interpreter redirection of the code module. if PY3: import sys as _sys else: # Parent module 'pdb_clone' not found while handling absolute # import. _sys = __import__('sys', level=0) code.sys = _sys self.redirect(code.interact, local=ns, readfunc=readfunc) else: code.interact("*interactive*", local=ns)
python
def do_interact(self, arg): """interact Start an interative interpreter whose global namespace contains all the (global and local) names found in the current scope. """ def readfunc(prompt): self.stdout.write(prompt) self.stdout.flush() line = self.stdin.readline() line = line.rstrip('\r\n') if line == 'EOF': raise EOFError return line ns = self.curframe.f_globals.copy() ns.update(self.get_locals(self.curframe)) if isinstance(self.stdin, RemoteSocket): # Main interpreter redirection of the code module. if PY3: import sys as _sys else: # Parent module 'pdb_clone' not found while handling absolute # import. _sys = __import__('sys', level=0) code.sys = _sys self.redirect(code.interact, local=ns, readfunc=readfunc) else: code.interact("*interactive*", local=ns)
[ "def", "do_interact", "(", "self", ",", "arg", ")", ":", "def", "readfunc", "(", "prompt", ")", ":", "self", ".", "stdout", ".", "write", "(", "prompt", ")", "self", ".", "stdout", ".", "flush", "(", ")", "line", "=", "self", ".", "stdin", ".", "readline", "(", ")", "line", "=", "line", ".", "rstrip", "(", "'\\r\\n'", ")", "if", "line", "==", "'EOF'", ":", "raise", "EOFError", "return", "line", "ns", "=", "self", ".", "curframe", ".", "f_globals", ".", "copy", "(", ")", "ns", ".", "update", "(", "self", ".", "get_locals", "(", "self", ".", "curframe", ")", ")", "if", "isinstance", "(", "self", ".", "stdin", ",", "RemoteSocket", ")", ":", "# Main interpreter redirection of the code module.", "if", "PY3", ":", "import", "sys", "as", "_sys", "else", ":", "# Parent module 'pdb_clone' not found while handling absolute", "# import.", "_sys", "=", "__import__", "(", "'sys'", ",", "level", "=", "0", ")", "code", ".", "sys", "=", "_sys", "self", ".", "redirect", "(", "code", ".", "interact", ",", "local", "=", "ns", ",", "readfunc", "=", "readfunc", ")", "else", ":", "code", ".", "interact", "(", "\"*interactive*\"", ",", "local", "=", "ns", ")" ]
interact Start an interative interpreter whose global namespace contains all the (global and local) names found in the current scope.
[ "interact" ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1620-L1648
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_alias
def do_alias(self, arg): """alias [name [command [parameter parameter ...] ]] Create an alias called 'name' that executes 'command'. The command must *not* be enclosed in quotes. Replaceable parameters can be indicated by %1, %2, and so on, while %* is replaced by all the parameters. If no command is given, the current alias for name is shown. If no name is given, all aliases are listed. Aliases may be nested and can contain anything that can be legally typed at the pdb prompt. Note! You *can* override internal pdb commands with aliases! Those internal commands are then hidden until the alias is removed. Aliasing is recursively applied to the first word of the command line; all other words in the line are left alone. As an example, here are two useful aliases (especially when placed in the .pdbrc file): # Print instance variables (usage "pi classInst") alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) # Print instance variables in self alias ps pi self """ args = arg.split() if len(args) == 0: keys = sorted(self.aliases.keys()) for alias in keys: self.message("%s = %s" % (alias, self.aliases[alias])) return if args[0] in self.aliases and len(args) == 1: self.message("%s = %s" % (args[0], self.aliases[args[0]])) else: self.aliases[args[0]] = ' '.join(args[1:])
python
def do_alias(self, arg): """alias [name [command [parameter parameter ...] ]] Create an alias called 'name' that executes 'command'. The command must *not* be enclosed in quotes. Replaceable parameters can be indicated by %1, %2, and so on, while %* is replaced by all the parameters. If no command is given, the current alias for name is shown. If no name is given, all aliases are listed. Aliases may be nested and can contain anything that can be legally typed at the pdb prompt. Note! You *can* override internal pdb commands with aliases! Those internal commands are then hidden until the alias is removed. Aliasing is recursively applied to the first word of the command line; all other words in the line are left alone. As an example, here are two useful aliases (especially when placed in the .pdbrc file): # Print instance variables (usage "pi classInst") alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) # Print instance variables in self alias ps pi self """ args = arg.split() if len(args) == 0: keys = sorted(self.aliases.keys()) for alias in keys: self.message("%s = %s" % (alias, self.aliases[alias])) return if args[0] in self.aliases and len(args) == 1: self.message("%s = %s" % (args[0], self.aliases[args[0]])) else: self.aliases[args[0]] = ' '.join(args[1:])
[ "def", "do_alias", "(", "self", ",", "arg", ")", ":", "args", "=", "arg", ".", "split", "(", ")", "if", "len", "(", "args", ")", "==", "0", ":", "keys", "=", "sorted", "(", "self", ".", "aliases", ".", "keys", "(", ")", ")", "for", "alias", "in", "keys", ":", "self", ".", "message", "(", "\"%s = %s\"", "%", "(", "alias", ",", "self", ".", "aliases", "[", "alias", "]", ")", ")", "return", "if", "args", "[", "0", "]", "in", "self", ".", "aliases", "and", "len", "(", "args", ")", "==", "1", ":", "self", ".", "message", "(", "\"%s = %s\"", "%", "(", "args", "[", "0", "]", ",", "self", ".", "aliases", "[", "args", "[", "0", "]", "]", ")", ")", "else", ":", "self", ".", "aliases", "[", "args", "[", "0", "]", "]", "=", "' '", ".", "join", "(", "args", "[", "1", ":", "]", ")" ]
alias [name [command [parameter parameter ...] ]] Create an alias called 'name' that executes 'command'. The command must *not* be enclosed in quotes. Replaceable parameters can be indicated by %1, %2, and so on, while %* is replaced by all the parameters. If no command is given, the current alias for name is shown. If no name is given, all aliases are listed. Aliases may be nested and can contain anything that can be legally typed at the pdb prompt. Note! You *can* override internal pdb commands with aliases! Those internal commands are then hidden until the alias is removed. Aliasing is recursively applied to the first word of the command line; all other words in the line are left alone. As an example, here are two useful aliases (especially when placed in the .pdbrc file): # Print instance variables (usage "pi classInst") alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) # Print instance variables in self alias ps pi self
[ "alias", "[", "name", "[", "command", "[", "parameter", "parameter", "...", "]", "]]", "Create", "an", "alias", "called", "name", "that", "executes", "command", ".", "The", "command", "must", "*", "not", "*", "be", "enclosed", "in", "quotes", ".", "Replaceable", "parameters", "can", "be", "indicated", "by", "%1", "%2", "and", "so", "on", "while", "%", "*", "is", "replaced", "by", "all", "the", "parameters", ".", "If", "no", "command", "is", "given", "the", "current", "alias", "for", "name", "is", "shown", ".", "If", "no", "name", "is", "given", "all", "aliases", "are", "listed", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1650-L1683
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_unalias
def do_unalias(self, arg): """unalias name Delete the specified alias. """ args = arg.split() if len(args) == 0: return if args[0] in self.aliases: del self.aliases[args[0]]
python
def do_unalias(self, arg): """unalias name Delete the specified alias. """ args = arg.split() if len(args) == 0: return if args[0] in self.aliases: del self.aliases[args[0]]
[ "def", "do_unalias", "(", "self", ",", "arg", ")", ":", "args", "=", "arg", ".", "split", "(", ")", "if", "len", "(", "args", ")", "==", "0", ":", "return", "if", "args", "[", "0", "]", "in", "self", ".", "aliases", ":", "del", "self", ".", "aliases", "[", "args", "[", "0", "]", "]" ]
unalias name Delete the specified alias.
[ "unalias", "name", "Delete", "the", "specified", "alias", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1685-L1692
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_thread
def do_thread(self, arg): """th(read) [threadnumber] Without argument, display a summary of all active threads. The summary prints for each thread: 1. the thread number assigned by pdb 2. the thread name 3. the python thread identifier 4. the current stack frame summary for that thread An asterisk '*' to the left of the pdb thread number indicates the current thread, a plus sign '+' indicates the thread being traced by pdb. With a pdb thread number as argument, make this thread the current thread. The 'where', 'up' and 'down' commands apply now to the frame stack of this thread. The current scope is now the frame currently executed by this thread at the time the command is issued and the 'list', 'll', 'args', 'p', 'pp', 'source' and 'interact' commands are run in the context of that frame. Note that this frame may bear no relationship (for a non-deadlocked thread) to that thread's current activity by the time you are examining the frame. This command does not stop the thread. """ # Import the threading module in the main interpreter to get an # enumeration of the main interpreter threads. if PY3: try: import threading except ImportError: import dummy_threading as threading else: # Do not use relative import detection to avoid the RuntimeWarning: # Parent module 'pdb_clone' not found while handling absolute # import. try: threading = __import__('threading', level=0) except ImportError: threading = __import__('dummy_threading', level=0) if not self.pdb_thread: self.pdb_thread = threading.current_thread() if not self.current_thread: self.current_thread = self.pdb_thread current_frames = sys._current_frames() tlist = sorted(threading.enumerate(), key=attrgetter('name', 'ident')) try: self._do_thread(arg, current_frames, tlist) finally: # For some reason this local must be explicitly deleted in order # to release the subinterpreter. del current_frames
python
def do_thread(self, arg): """th(read) [threadnumber] Without argument, display a summary of all active threads. The summary prints for each thread: 1. the thread number assigned by pdb 2. the thread name 3. the python thread identifier 4. the current stack frame summary for that thread An asterisk '*' to the left of the pdb thread number indicates the current thread, a plus sign '+' indicates the thread being traced by pdb. With a pdb thread number as argument, make this thread the current thread. The 'where', 'up' and 'down' commands apply now to the frame stack of this thread. The current scope is now the frame currently executed by this thread at the time the command is issued and the 'list', 'll', 'args', 'p', 'pp', 'source' and 'interact' commands are run in the context of that frame. Note that this frame may bear no relationship (for a non-deadlocked thread) to that thread's current activity by the time you are examining the frame. This command does not stop the thread. """ # Import the threading module in the main interpreter to get an # enumeration of the main interpreter threads. if PY3: try: import threading except ImportError: import dummy_threading as threading else: # Do not use relative import detection to avoid the RuntimeWarning: # Parent module 'pdb_clone' not found while handling absolute # import. try: threading = __import__('threading', level=0) except ImportError: threading = __import__('dummy_threading', level=0) if not self.pdb_thread: self.pdb_thread = threading.current_thread() if not self.current_thread: self.current_thread = self.pdb_thread current_frames = sys._current_frames() tlist = sorted(threading.enumerate(), key=attrgetter('name', 'ident')) try: self._do_thread(arg, current_frames, tlist) finally: # For some reason this local must be explicitly deleted in order # to release the subinterpreter. del current_frames
[ "def", "do_thread", "(", "self", ",", "arg", ")", ":", "# Import the threading module in the main interpreter to get an", "# enumeration of the main interpreter threads.", "if", "PY3", ":", "try", ":", "import", "threading", "except", "ImportError", ":", "import", "dummy_threading", "as", "threading", "else", ":", "# Do not use relative import detection to avoid the RuntimeWarning:", "# Parent module 'pdb_clone' not found while handling absolute", "# import.", "try", ":", "threading", "=", "__import__", "(", "'threading'", ",", "level", "=", "0", ")", "except", "ImportError", ":", "threading", "=", "__import__", "(", "'dummy_threading'", ",", "level", "=", "0", ")", "if", "not", "self", ".", "pdb_thread", ":", "self", ".", "pdb_thread", "=", "threading", ".", "current_thread", "(", ")", "if", "not", "self", ".", "current_thread", ":", "self", ".", "current_thread", "=", "self", ".", "pdb_thread", "current_frames", "=", "sys", ".", "_current_frames", "(", ")", "tlist", "=", "sorted", "(", "threading", ".", "enumerate", "(", ")", ",", "key", "=", "attrgetter", "(", "'name'", ",", "'ident'", ")", ")", "try", ":", "self", ".", "_do_thread", "(", "arg", ",", "current_frames", ",", "tlist", ")", "finally", ":", "# For some reason this local must be explicitly deleted in order", "# to release the subinterpreter.", "del", "current_frames" ]
th(read) [threadnumber] Without argument, display a summary of all active threads. The summary prints for each thread: 1. the thread number assigned by pdb 2. the thread name 3. the python thread identifier 4. the current stack frame summary for that thread An asterisk '*' to the left of the pdb thread number indicates the current thread, a plus sign '+' indicates the thread being traced by pdb. With a pdb thread number as argument, make this thread the current thread. The 'where', 'up' and 'down' commands apply now to the frame stack of this thread. The current scope is now the frame currently executed by this thread at the time the command is issued and the 'list', 'll', 'args', 'p', 'pp', 'source' and 'interact' commands are run in the context of that frame. Note that this frame may bear no relationship (for a non-deadlocked thread) to that thread's current activity by the time you are examining the frame. This command does not stop the thread.
[ "th", "(", "read", ")", "[", "threadnumber", "]", "Without", "argument", "display", "a", "summary", "of", "all", "active", "threads", ".", "The", "summary", "prints", "for", "each", "thread", ":", "1", ".", "the", "thread", "number", "assigned", "by", "pdb", "2", ".", "the", "thread", "name", "3", ".", "the", "python", "thread", "identifier", "4", ".", "the", "current", "stack", "frame", "summary", "for", "that", "thread", "An", "asterisk", "*", "to", "the", "left", "of", "the", "pdb", "thread", "number", "indicates", "the", "current", "thread", "a", "plus", "sign", "+", "indicates", "the", "thread", "being", "traced", "by", "pdb", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1740-L1790
corpusops/pdbclone
lib/pdb_clone/pdb.py
Pdb.do_help
def do_help(self, arg): """h(elp) Without argument, print the list of available commands. With a command name as argument, print help about that command. "help pdb" shows the full pdb documentation. "help exec" gives help on the ! command. """ if not arg: return cmd.Cmd.do_help(self, arg) try: try: topic = getattr(self, 'help_' + arg) return topic() except AttributeError: command = getattr(self, 'do_' + arg) except AttributeError: self.error('No help for %r' % arg) else: if sys.flags.optimize >= 2: self.error('No help for %r; please do not run Python with -OO ' 'if you need command help' % arg) return self.message(command.__doc__.rstrip())
python
def do_help(self, arg): """h(elp) Without argument, print the list of available commands. With a command name as argument, print help about that command. "help pdb" shows the full pdb documentation. "help exec" gives help on the ! command. """ if not arg: return cmd.Cmd.do_help(self, arg) try: try: topic = getattr(self, 'help_' + arg) return topic() except AttributeError: command = getattr(self, 'do_' + arg) except AttributeError: self.error('No help for %r' % arg) else: if sys.flags.optimize >= 2: self.error('No help for %r; please do not run Python with -OO ' 'if you need command help' % arg) return self.message(command.__doc__.rstrip())
[ "def", "do_help", "(", "self", ",", "arg", ")", ":", "if", "not", "arg", ":", "return", "cmd", ".", "Cmd", ".", "do_help", "(", "self", ",", "arg", ")", "try", ":", "try", ":", "topic", "=", "getattr", "(", "self", ",", "'help_'", "+", "arg", ")", "return", "topic", "(", ")", "except", "AttributeError", ":", "command", "=", "getattr", "(", "self", ",", "'do_'", "+", "arg", ")", "except", "AttributeError", ":", "self", ".", "error", "(", "'No help for %r'", "%", "arg", ")", "else", ":", "if", "sys", ".", "flags", ".", "optimize", ">=", "2", ":", "self", ".", "error", "(", "'No help for %r; please do not run Python with -OO '", "'if you need command help'", "%", "arg", ")", "return", "self", ".", "message", "(", "command", ".", "__doc__", ".", "rstrip", "(", ")", ")" ]
h(elp) Without argument, print the list of available commands. With a command name as argument, print help about that command. "help pdb" shows the full pdb documentation. "help exec" gives help on the ! command.
[ "h", "(", "elp", ")", "Without", "argument", "print", "the", "list", "of", "available", "commands", ".", "With", "a", "command", "name", "as", "argument", "print", "help", "about", "that", "command", ".", "help", "pdb", "shows", "the", "full", "pdb", "documentation", ".", "help", "exec", "gives", "help", "on", "the", "!", "command", "." ]
train
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdb.py#L1825-L1847
MediaFire/mediafire-python-open-sdk
mediafire/subsetio.py
SubsetIO.read
def read(self, limit=-1): """Read content. See file.read""" remaining = self.len - self.parent_fd.tell() + self.offset if limit > remaining or limit == -1: limit = remaining return self.parent_fd.read(limit)
python
def read(self, limit=-1): """Read content. See file.read""" remaining = self.len - self.parent_fd.tell() + self.offset if limit > remaining or limit == -1: limit = remaining return self.parent_fd.read(limit)
[ "def", "read", "(", "self", ",", "limit", "=", "-", "1", ")", ":", "remaining", "=", "self", ".", "len", "-", "self", ".", "parent_fd", ".", "tell", "(", ")", "+", "self", ".", "offset", "if", "limit", ">", "remaining", "or", "limit", "==", "-", "1", ":", "limit", "=", "remaining", "return", "self", ".", "parent_fd", ".", "read", "(", "limit", ")" ]
Read content. See file.read
[ "Read", "content", ".", "See", "file", ".", "read" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/subsetio.py#L48-L55
MediaFire/mediafire-python-open-sdk
mediafire/subsetio.py
SubsetIO.seek
def seek(self, offset, whence=os.SEEK_SET): """Seek to position in stream, see file.seek""" pos = None if whence == os.SEEK_SET: pos = self.offset + offset elif whence == os.SEEK_CUR: pos = self.tell() + offset elif whence == os.SEEK_END: pos = self.offset + self.len + offset else: raise ValueError("invalid whence {}".format(whence)) if pos > self.offset + self.len or pos < self.offset: raise ValueError("seek position beyond chunk area") self.parent_fd.seek(pos, os.SEEK_SET)
python
def seek(self, offset, whence=os.SEEK_SET): """Seek to position in stream, see file.seek""" pos = None if whence == os.SEEK_SET: pos = self.offset + offset elif whence == os.SEEK_CUR: pos = self.tell() + offset elif whence == os.SEEK_END: pos = self.offset + self.len + offset else: raise ValueError("invalid whence {}".format(whence)) if pos > self.offset + self.len or pos < self.offset: raise ValueError("seek position beyond chunk area") self.parent_fd.seek(pos, os.SEEK_SET)
[ "def", "seek", "(", "self", ",", "offset", ",", "whence", "=", "os", ".", "SEEK_SET", ")", ":", "pos", "=", "None", "if", "whence", "==", "os", ".", "SEEK_SET", ":", "pos", "=", "self", ".", "offset", "+", "offset", "elif", "whence", "==", "os", ".", "SEEK_CUR", ":", "pos", "=", "self", ".", "tell", "(", ")", "+", "offset", "elif", "whence", "==", "os", ".", "SEEK_END", ":", "pos", "=", "self", ".", "offset", "+", "self", ".", "len", "+", "offset", "else", ":", "raise", "ValueError", "(", "\"invalid whence {}\"", ".", "format", "(", "whence", ")", ")", "if", "pos", ">", "self", ".", "offset", "+", "self", ".", "len", "or", "pos", "<", "self", ".", "offset", ":", "raise", "ValueError", "(", "\"seek position beyond chunk area\"", ")", "self", ".", "parent_fd", ".", "seek", "(", "pos", ",", "os", ".", "SEEK_SET", ")" ]
Seek to position in stream, see file.seek
[ "Seek", "to", "position", "in", "stream", "see", "file", ".", "seek" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/subsetio.py#L57-L73
MediaFire/mediafire-python-open-sdk
mediafire/subsetio.py
SubsetIO.close
def close(self): """Close file, see file.close""" try: self.parent_fd.fileno() except io.UnsupportedOperation: logger.debug("Not closing parent_fd - reusing existing") else: self.parent_fd.close()
python
def close(self): """Close file, see file.close""" try: self.parent_fd.fileno() except io.UnsupportedOperation: logger.debug("Not closing parent_fd - reusing existing") else: self.parent_fd.close()
[ "def", "close", "(", "self", ")", ":", "try", ":", "self", ".", "parent_fd", ".", "fileno", "(", ")", "except", "io", ".", "UnsupportedOperation", ":", "logger", ".", "debug", "(", "\"Not closing parent_fd - reusing existing\"", ")", "else", ":", "self", ".", "parent_fd", ".", "close", "(", ")" ]
Close file, see file.close
[ "Close", "file", "see", "file", ".", "close" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/subsetio.py#L80-L87
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi._build_query
def _build_query(self, uri, params=None, action_token_type=None): """Prepare query string""" if params is None: params = QueryParams() params['response_format'] = 'json' session_token = None if action_token_type in self._action_tokens: # Favor action token using_action_token = True session_token = self._action_tokens[action_token_type] else: using_action_token = False if self._session: session_token = self._session['session_token'] if session_token: params['session_token'] = session_token # make order of parameters predictable for testing keys = list(params.keys()) keys.sort() query = urlencode([tuple([key, params[key]]) for key in keys]) if not using_action_token and self._session: secret_key_mod = int(self._session['secret_key']) % 256 signature_base = (str(secret_key_mod) + self._session['time'] + uri + '?' + query).encode('ascii') query += '&signature=' + hashlib.md5(signature_base).hexdigest() return query
python
def _build_query(self, uri, params=None, action_token_type=None): """Prepare query string""" if params is None: params = QueryParams() params['response_format'] = 'json' session_token = None if action_token_type in self._action_tokens: # Favor action token using_action_token = True session_token = self._action_tokens[action_token_type] else: using_action_token = False if self._session: session_token = self._session['session_token'] if session_token: params['session_token'] = session_token # make order of parameters predictable for testing keys = list(params.keys()) keys.sort() query = urlencode([tuple([key, params[key]]) for key in keys]) if not using_action_token and self._session: secret_key_mod = int(self._session['secret_key']) % 256 signature_base = (str(secret_key_mod) + self._session['time'] + uri + '?' + query).encode('ascii') query += '&signature=' + hashlib.md5(signature_base).hexdigest() return query
[ "def", "_build_query", "(", "self", ",", "uri", ",", "params", "=", "None", ",", "action_token_type", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "QueryParams", "(", ")", "params", "[", "'response_format'", "]", "=", "'json'", "session_token", "=", "None", "if", "action_token_type", "in", "self", ".", "_action_tokens", ":", "# Favor action token", "using_action_token", "=", "True", "session_token", "=", "self", ".", "_action_tokens", "[", "action_token_type", "]", "else", ":", "using_action_token", "=", "False", "if", "self", ".", "_session", ":", "session_token", "=", "self", ".", "_session", "[", "'session_token'", "]", "if", "session_token", ":", "params", "[", "'session_token'", "]", "=", "session_token", "# make order of parameters predictable for testing", "keys", "=", "list", "(", "params", ".", "keys", "(", ")", ")", "keys", ".", "sort", "(", ")", "query", "=", "urlencode", "(", "[", "tuple", "(", "[", "key", ",", "params", "[", "key", "]", "]", ")", "for", "key", "in", "keys", "]", ")", "if", "not", "using_action_token", "and", "self", ".", "_session", ":", "secret_key_mod", "=", "int", "(", "self", ".", "_session", "[", "'secret_key'", "]", ")", "%", "256", "signature_base", "=", "(", "str", "(", "secret_key_mod", ")", "+", "self", ".", "_session", "[", "'time'", "]", "+", "uri", "+", "'?'", "+", "query", ")", ".", "encode", "(", "'ascii'", ")", "query", "+=", "'&signature='", "+", "hashlib", ".", "md5", "(", "signature_base", ")", ".", "hexdigest", "(", ")", "return", "query" ]
Prepare query string
[ "Prepare", "query", "string" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L97-L134
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.request
def request(self, action, params=None, action_token_type=None, upload_info=None, headers=None): """Perform request to MediaFire API action -- "category/name" of method to call params -- dict of parameters or query string action_token_type -- action token to use: None, "upload", "image" upload_info -- in case of upload, dict of "fd" and "filename" headers -- additional headers to send (used for upload) session_token and signature generation/update is handled automatically """ uri = self._build_uri(action) if isinstance(params, six.text_type): query = params else: query = self._build_query(uri, params, action_token_type) if headers is None: headers = {} if upload_info is None: # Use request body for query data = query headers['Content-Type'] = FORM_MIMETYPE else: # Use query string for query since payload is file uri += '?' + query if "filename" in upload_info: data = MultipartEncoder( fields={'file': ( upload_info["filename"], upload_info["fd"], UPLOAD_MIMETYPE )} ) headers["Content-Type"] = data.content_type else: data = upload_info["fd"] headers["Content-Type"] = UPLOAD_MIMETYPE logger.debug("uri=%s query=%s", uri, query if not upload_info else None) try: # bytes from now on url = (API_BASE + uri).encode('utf-8') if isinstance(data, six.text_type): # request's data is bytes, dict, or filehandle data = data.encode('utf-8') response = self.http.post(url, data=data, headers=headers, stream=True) except RequestException as ex: logger.exception("HTTP request failed") raise MediaFireConnectionError( "RequestException: {}".format(ex)) return self._process_response(response)
python
def request(self, action, params=None, action_token_type=None, upload_info=None, headers=None): """Perform request to MediaFire API action -- "category/name" of method to call params -- dict of parameters or query string action_token_type -- action token to use: None, "upload", "image" upload_info -- in case of upload, dict of "fd" and "filename" headers -- additional headers to send (used for upload) session_token and signature generation/update is handled automatically """ uri = self._build_uri(action) if isinstance(params, six.text_type): query = params else: query = self._build_query(uri, params, action_token_type) if headers is None: headers = {} if upload_info is None: # Use request body for query data = query headers['Content-Type'] = FORM_MIMETYPE else: # Use query string for query since payload is file uri += '?' + query if "filename" in upload_info: data = MultipartEncoder( fields={'file': ( upload_info["filename"], upload_info["fd"], UPLOAD_MIMETYPE )} ) headers["Content-Type"] = data.content_type else: data = upload_info["fd"] headers["Content-Type"] = UPLOAD_MIMETYPE logger.debug("uri=%s query=%s", uri, query if not upload_info else None) try: # bytes from now on url = (API_BASE + uri).encode('utf-8') if isinstance(data, six.text_type): # request's data is bytes, dict, or filehandle data = data.encode('utf-8') response = self.http.post(url, data=data, headers=headers, stream=True) except RequestException as ex: logger.exception("HTTP request failed") raise MediaFireConnectionError( "RequestException: {}".format(ex)) return self._process_response(response)
[ "def", "request", "(", "self", ",", "action", ",", "params", "=", "None", ",", "action_token_type", "=", "None", ",", "upload_info", "=", "None", ",", "headers", "=", "None", ")", ":", "uri", "=", "self", ".", "_build_uri", "(", "action", ")", "if", "isinstance", "(", "params", ",", "six", ".", "text_type", ")", ":", "query", "=", "params", "else", ":", "query", "=", "self", ".", "_build_query", "(", "uri", ",", "params", ",", "action_token_type", ")", "if", "headers", "is", "None", ":", "headers", "=", "{", "}", "if", "upload_info", "is", "None", ":", "# Use request body for query", "data", "=", "query", "headers", "[", "'Content-Type'", "]", "=", "FORM_MIMETYPE", "else", ":", "# Use query string for query since payload is file", "uri", "+=", "'?'", "+", "query", "if", "\"filename\"", "in", "upload_info", ":", "data", "=", "MultipartEncoder", "(", "fields", "=", "{", "'file'", ":", "(", "upload_info", "[", "\"filename\"", "]", ",", "upload_info", "[", "\"fd\"", "]", ",", "UPLOAD_MIMETYPE", ")", "}", ")", "headers", "[", "\"Content-Type\"", "]", "=", "data", ".", "content_type", "else", ":", "data", "=", "upload_info", "[", "\"fd\"", "]", "headers", "[", "\"Content-Type\"", "]", "=", "UPLOAD_MIMETYPE", "logger", ".", "debug", "(", "\"uri=%s query=%s\"", ",", "uri", ",", "query", "if", "not", "upload_info", "else", "None", ")", "try", ":", "# bytes from now on", "url", "=", "(", "API_BASE", "+", "uri", ")", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "data", ",", "six", ".", "text_type", ")", ":", "# request's data is bytes, dict, or filehandle", "data", "=", "data", ".", "encode", "(", "'utf-8'", ")", "response", "=", "self", ".", "http", ".", "post", "(", "url", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "stream", "=", "True", ")", "except", "RequestException", "as", "ex", ":", "logger", ".", "exception", "(", "\"HTTP request failed\"", ")", "raise", "MediaFireConnectionError", "(", "\"RequestException: {}\"", ".", "format", 
"(", "ex", ")", ")", "return", "self", ".", "_process_response", "(", "response", ")" ]
Perform request to MediaFire API action -- "category/name" of method to call params -- dict of parameters or query string action_token_type -- action token to use: None, "upload", "image" upload_info -- in case of upload, dict of "fd" and "filename" headers -- additional headers to send (used for upload) session_token and signature generation/update is handled automatically
[ "Perform", "request", "to", "MediaFire", "API" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L136-L197
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi._process_response
def _process_response(self, response): """Parse response""" forward_raw = False content_type = response.headers['Content-Type'] if content_type != 'application/json': logger.debug("headers: %s", response.headers) # API BUG: text/xml content-type with json payload # http://forum.mediafiredev.com/showthread.php?136 if content_type == 'text/xml': # we never request xml, so check it quacks like JSON if not response.text.lstrip().startswith('{'): forward_raw = True else: # _process_response can't deal with non-json, # return response as is forward_raw = True if forward_raw: response.raise_for_status() return response logger.debug("response: %s", response.text) # if we are here, then most likely have json try: response_node = response.json()['response'] except ValueError: # promised JSON but failed raise MediaFireApiError("JSON decode failure") if response_node.get('new_key', 'no') == 'yes': self._regenerate_secret_key() # check for errors if response_node['result'] != 'Success': raise MediaFireApiError(response_node['message'], response_node['error']) return response_node
python
def _process_response(self, response): """Parse response""" forward_raw = False content_type = response.headers['Content-Type'] if content_type != 'application/json': logger.debug("headers: %s", response.headers) # API BUG: text/xml content-type with json payload # http://forum.mediafiredev.com/showthread.php?136 if content_type == 'text/xml': # we never request xml, so check it quacks like JSON if not response.text.lstrip().startswith('{'): forward_raw = True else: # _process_response can't deal with non-json, # return response as is forward_raw = True if forward_raw: response.raise_for_status() return response logger.debug("response: %s", response.text) # if we are here, then most likely have json try: response_node = response.json()['response'] except ValueError: # promised JSON but failed raise MediaFireApiError("JSON decode failure") if response_node.get('new_key', 'no') == 'yes': self._regenerate_secret_key() # check for errors if response_node['result'] != 'Success': raise MediaFireApiError(response_node['message'], response_node['error']) return response_node
[ "def", "_process_response", "(", "self", ",", "response", ")", ":", "forward_raw", "=", "False", "content_type", "=", "response", ".", "headers", "[", "'Content-Type'", "]", "if", "content_type", "!=", "'application/json'", ":", "logger", ".", "debug", "(", "\"headers: %s\"", ",", "response", ".", "headers", ")", "# API BUG: text/xml content-type with json payload", "# http://forum.mediafiredev.com/showthread.php?136", "if", "content_type", "==", "'text/xml'", ":", "# we never request xml, so check it quacks like JSON", "if", "not", "response", ".", "text", ".", "lstrip", "(", ")", ".", "startswith", "(", "'{'", ")", ":", "forward_raw", "=", "True", "else", ":", "# _process_response can't deal with non-json,", "# return response as is", "forward_raw", "=", "True", "if", "forward_raw", ":", "response", ".", "raise_for_status", "(", ")", "return", "response", "logger", ".", "debug", "(", "\"response: %s\"", ",", "response", ".", "text", ")", "# if we are here, then most likely have json", "try", ":", "response_node", "=", "response", ".", "json", "(", ")", "[", "'response'", "]", "except", "ValueError", ":", "# promised JSON but failed", "raise", "MediaFireApiError", "(", "\"JSON decode failure\"", ")", "if", "response_node", ".", "get", "(", "'new_key'", ",", "'no'", ")", "==", "'yes'", ":", "self", ".", "_regenerate_secret_key", "(", ")", "# check for errors", "if", "response_node", "[", "'result'", "]", "!=", "'Success'", ":", "raise", "MediaFireApiError", "(", "response_node", "[", "'message'", "]", ",", "response_node", "[", "'error'", "]", ")", "return", "response_node" ]
Parse response
[ "Parse", "response" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L199-L238
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi._regenerate_secret_key
def _regenerate_secret_key(self): """Regenerate secret key http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature """ # Don't regenerate the key if we have none if self._session and 'secret_key' in self._session: self._session['secret_key'] = ( int(self._session['secret_key']) * 16807) % 2147483647
python
def _regenerate_secret_key(self): """Regenerate secret key http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature """ # Don't regenerate the key if we have none if self._session and 'secret_key' in self._session: self._session['secret_key'] = ( int(self._session['secret_key']) * 16807) % 2147483647
[ "def", "_regenerate_secret_key", "(", "self", ")", ":", "# Don't regenerate the key if we have none", "if", "self", ".", "_session", "and", "'secret_key'", "in", "self", ".", "_session", ":", "self", ".", "_session", "[", "'secret_key'", "]", "=", "(", "int", "(", "self", ".", "_session", "[", "'secret_key'", "]", ")", "*", "16807", ")", "%", "2147483647" ]
Regenerate secret key http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature
[ "Regenerate", "secret", "key" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L240-L248
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.session
def session(self, value): """Set session token value -- dict returned by user/get_session_token""" # unset session token if value is None: self._session = None return if not isinstance(value, dict): raise ValueError("session info is required") session_parsed = {} for key in ["session_token", "time", "secret_key"]: if key not in value: raise ValueError("Missing parameter: {}".format(key)) session_parsed[key] = value[key] for key in ["ekey", "pkey"]: # nice to have, but not mandatory if key in value: session_parsed[key] = value[key] self._session = session_parsed
python
def session(self, value): """Set session token value -- dict returned by user/get_session_token""" # unset session token if value is None: self._session = None return if not isinstance(value, dict): raise ValueError("session info is required") session_parsed = {} for key in ["session_token", "time", "secret_key"]: if key not in value: raise ValueError("Missing parameter: {}".format(key)) session_parsed[key] = value[key] for key in ["ekey", "pkey"]: # nice to have, but not mandatory if key in value: session_parsed[key] = value[key] self._session = session_parsed
[ "def", "session", "(", "self", ",", "value", ")", ":", "# unset session token", "if", "value", "is", "None", ":", "self", ".", "_session", "=", "None", "return", "if", "not", "isinstance", "(", "value", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"session info is required\"", ")", "session_parsed", "=", "{", "}", "for", "key", "in", "[", "\"session_token\"", ",", "\"time\"", ",", "\"secret_key\"", "]", ":", "if", "key", "not", "in", "value", ":", "raise", "ValueError", "(", "\"Missing parameter: {}\"", ".", "format", "(", "key", ")", ")", "session_parsed", "[", "key", "]", "=", "value", "[", "key", "]", "for", "key", "in", "[", "\"ekey\"", ",", "\"pkey\"", "]", ":", "# nice to have, but not mandatory", "if", "key", "in", "value", ":", "session_parsed", "[", "key", "]", "=", "value", "[", "key", "]", "self", ".", "_session", "=", "session_parsed" ]
Set session token value -- dict returned by user/get_session_token
[ "Set", "session", "token" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L256-L281
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.set_action_token
def set_action_token(self, type_=None, action_token=None): """Set action tokens type_ -- either "upload" or "image" action_token -- string obtained from user/get_action_token, set None to remove the token """ if action_token is None: del self._action_tokens[type_] else: self._action_tokens[type_] = action_token
python
def set_action_token(self, type_=None, action_token=None): """Set action tokens type_ -- either "upload" or "image" action_token -- string obtained from user/get_action_token, set None to remove the token """ if action_token is None: del self._action_tokens[type_] else: self._action_tokens[type_] = action_token
[ "def", "set_action_token", "(", "self", ",", "type_", "=", "None", ",", "action_token", "=", "None", ")", ":", "if", "action_token", "is", "None", ":", "del", "self", ".", "_action_tokens", "[", "type_", "]", "else", ":", "self", ".", "_action_tokens", "[", "type_", "]", "=", "action_token" ]
Set action tokens type_ -- either "upload" or "image" action_token -- string obtained from user/get_action_token, set None to remove the token
[ "Set", "action", "tokens" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L288-L298
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.user_get_session_token
def user_get_session_token(self, app_id=None, email=None, password=None, ekey=None, fb_access_token=None, tw_oauth_token=None, tw_oauth_token_secret=None, api_key=None): """user/get_session_token http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token """ if app_id is None: raise ValueError("app_id must be defined") params = QueryParams({ 'application_id': str(app_id), 'token_version': 2, 'response_format': 'json' }) if fb_access_token: params['fb_access_token'] = fb_access_token signature_keys = ['fb_access_token'] elif tw_oauth_token and tw_oauth_token_secret: params['tw_oauth_token'] = tw_oauth_token params['tw_oauth_token_secret'] = tw_oauth_token_secret signature_keys = ['tw_oauth_token', 'tw_oauth_token_secret'] elif (email or ekey) and password: signature_keys = [] if email: signature_keys.append('email') params['email'] = email if ekey: signature_keys.append('ekey') params['ekey'] = ekey params['password'] = password signature_keys.append('password') else: raise ValueError("Credentials not provided") signature_keys.append('application_id') signature = hashlib.sha1() for key in signature_keys: signature.update(str(params[key]).encode('ascii')) # Note: If the app uses a callback URL to provide its API key, # or if it does not have the "Require Secret Key" option checked, # then the API key may be omitted from the signature if api_key: signature.update(api_key.encode('ascii')) query = urlencode(params) query += '&signature=' + signature.hexdigest() return self.request('user/get_session_token', params=query)
python
def user_get_session_token(self, app_id=None, email=None, password=None, ekey=None, fb_access_token=None, tw_oauth_token=None, tw_oauth_token_secret=None, api_key=None): """user/get_session_token http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token """ if app_id is None: raise ValueError("app_id must be defined") params = QueryParams({ 'application_id': str(app_id), 'token_version': 2, 'response_format': 'json' }) if fb_access_token: params['fb_access_token'] = fb_access_token signature_keys = ['fb_access_token'] elif tw_oauth_token and tw_oauth_token_secret: params['tw_oauth_token'] = tw_oauth_token params['tw_oauth_token_secret'] = tw_oauth_token_secret signature_keys = ['tw_oauth_token', 'tw_oauth_token_secret'] elif (email or ekey) and password: signature_keys = [] if email: signature_keys.append('email') params['email'] = email if ekey: signature_keys.append('ekey') params['ekey'] = ekey params['password'] = password signature_keys.append('password') else: raise ValueError("Credentials not provided") signature_keys.append('application_id') signature = hashlib.sha1() for key in signature_keys: signature.update(str(params[key]).encode('ascii')) # Note: If the app uses a callback URL to provide its API key, # or if it does not have the "Require Secret Key" option checked, # then the API key may be omitted from the signature if api_key: signature.update(api_key.encode('ascii')) query = urlencode(params) query += '&signature=' + signature.hexdigest() return self.request('user/get_session_token', params=query)
[ "def", "user_get_session_token", "(", "self", ",", "app_id", "=", "None", ",", "email", "=", "None", ",", "password", "=", "None", ",", "ekey", "=", "None", ",", "fb_access_token", "=", "None", ",", "tw_oauth_token", "=", "None", ",", "tw_oauth_token_secret", "=", "None", ",", "api_key", "=", "None", ")", ":", "if", "app_id", "is", "None", ":", "raise", "ValueError", "(", "\"app_id must be defined\"", ")", "params", "=", "QueryParams", "(", "{", "'application_id'", ":", "str", "(", "app_id", ")", ",", "'token_version'", ":", "2", ",", "'response_format'", ":", "'json'", "}", ")", "if", "fb_access_token", ":", "params", "[", "'fb_access_token'", "]", "=", "fb_access_token", "signature_keys", "=", "[", "'fb_access_token'", "]", "elif", "tw_oauth_token", "and", "tw_oauth_token_secret", ":", "params", "[", "'tw_oauth_token'", "]", "=", "tw_oauth_token", "params", "[", "'tw_oauth_token_secret'", "]", "=", "tw_oauth_token_secret", "signature_keys", "=", "[", "'tw_oauth_token'", ",", "'tw_oauth_token_secret'", "]", "elif", "(", "email", "or", "ekey", ")", "and", "password", ":", "signature_keys", "=", "[", "]", "if", "email", ":", "signature_keys", ".", "append", "(", "'email'", ")", "params", "[", "'email'", "]", "=", "email", "if", "ekey", ":", "signature_keys", ".", "append", "(", "'ekey'", ")", "params", "[", "'ekey'", "]", "=", "ekey", "params", "[", "'password'", "]", "=", "password", "signature_keys", ".", "append", "(", "'password'", ")", "else", ":", "raise", "ValueError", "(", "\"Credentials not provided\"", ")", "signature_keys", ".", "append", "(", "'application_id'", ")", "signature", "=", "hashlib", ".", "sha1", "(", ")", "for", "key", "in", "signature_keys", ":", "signature", ".", "update", "(", "str", "(", "params", "[", "key", "]", ")", ".", "encode", "(", "'ascii'", ")", ")", "# Note: If the app uses a callback URL to provide its API key,", "# or if it does not have the \"Require Secret Key\" option checked,", "# then the API key may be omitted from 
the signature", "if", "api_key", ":", "signature", ".", "update", "(", "api_key", ".", "encode", "(", "'ascii'", ")", ")", "query", "=", "urlencode", "(", "params", ")", "query", "+=", "'&signature='", "+", "signature", ".", "hexdigest", "(", ")", "return", "self", ".", "request", "(", "'user/get_session_token'", ",", "params", "=", "query", ")" ]
user/get_session_token http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token
[ "user", "/", "get_session_token" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L318-L374
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.user_set_avatar
def user_set_avatar(self, action=None, quick_key=None, url=None): """user/set_avatar http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar """ return self.request("user/set_avatar", QueryParams({ "action": action, "quick_key": quick_key, "url": url }))
python
def user_set_avatar(self, action=None, quick_key=None, url=None): """user/set_avatar http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar """ return self.request("user/set_avatar", QueryParams({ "action": action, "quick_key": quick_key, "url": url }))
[ "def", "user_set_avatar", "(", "self", ",", "action", "=", "None", ",", "quick_key", "=", "None", ",", "url", "=", "None", ")", ":", "return", "self", ".", "request", "(", "\"user/set_avatar\"", ",", "QueryParams", "(", "{", "\"action\"", ":", "action", ",", "\"quick_key\"", ":", "quick_key", ",", "\"url\"", ":", "url", "}", ")", ")" ]
user/set_avatar http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar
[ "user", "/", "set_avatar" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L430-L439
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.user_update
def user_update(self, display_name=None, first_name=None, last_name=None, email=None, password=None, current_password=None, birth_date=None, gender=None, website=None, subdomain=None, location=None, newsletter=None, primary_usage=None, timezone=None): """ user/update http://www.mediafire.com/developers/core_api/1.3/user/#update """ return self.request("user/update", QueryParams({ "display_name": display_name, "first_name": first_name, "last_name": last_name, "email": email, "password": password, "current_password": current_password, "birth_date": birth_date, "gender": gender, "website": website, "subdomain": subdomain, "location": location, "newsletter": newsletter, "primary_usage": primary_usage, "timezone": timezone }))
python
def user_update(self, display_name=None, first_name=None, last_name=None, email=None, password=None, current_password=None, birth_date=None, gender=None, website=None, subdomain=None, location=None, newsletter=None, primary_usage=None, timezone=None): """ user/update http://www.mediafire.com/developers/core_api/1.3/user/#update """ return self.request("user/update", QueryParams({ "display_name": display_name, "first_name": first_name, "last_name": last_name, "email": email, "password": password, "current_password": current_password, "birth_date": birth_date, "gender": gender, "website": website, "subdomain": subdomain, "location": location, "newsletter": newsletter, "primary_usage": primary_usage, "timezone": timezone }))
[ "def", "user_update", "(", "self", ",", "display_name", "=", "None", ",", "first_name", "=", "None", ",", "last_name", "=", "None", ",", "email", "=", "None", ",", "password", "=", "None", ",", "current_password", "=", "None", ",", "birth_date", "=", "None", ",", "gender", "=", "None", ",", "website", "=", "None", ",", "subdomain", "=", "None", ",", "location", "=", "None", ",", "newsletter", "=", "None", ",", "primary_usage", "=", "None", ",", "timezone", "=", "None", ")", ":", "return", "self", ".", "request", "(", "\"user/update\"", ",", "QueryParams", "(", "{", "\"display_name\"", ":", "display_name", ",", "\"first_name\"", ":", "first_name", ",", "\"last_name\"", ":", "last_name", ",", "\"email\"", ":", "email", ",", "\"password\"", ":", "password", ",", "\"current_password\"", ":", "current_password", ",", "\"birth_date\"", ":", "birth_date", ",", "\"gender\"", ":", "gender", ",", "\"website\"", ":", "website", ",", "\"subdomain\"", ":", "subdomain", ",", "\"location\"", ":", "location", ",", "\"newsletter\"", ":", "newsletter", ",", "\"primary_usage\"", ":", "primary_usage", ",", "\"timezone\"", ":", "timezone", "}", ")", ")" ]
user/update http://www.mediafire.com/developers/core_api/1.3/user/#update
[ "user", "/", "update" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L441-L466
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_get_info
def folder_get_info(self, folder_key=None, device_id=None, details=None): """folder/get_info http://www.mediafire.com/developers/core_api/1.3/folder/#get_info """ return self.request('folder/get_info', QueryParams({ 'folder_key': folder_key, 'device_id': device_id, 'details': details }))
python
def folder_get_info(self, folder_key=None, device_id=None, details=None): """folder/get_info http://www.mediafire.com/developers/core_api/1.3/folder/#get_info """ return self.request('folder/get_info', QueryParams({ 'folder_key': folder_key, 'device_id': device_id, 'details': details }))
[ "def", "folder_get_info", "(", "self", ",", "folder_key", "=", "None", ",", "device_id", "=", "None", ",", "details", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/get_info'", ",", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'device_id'", ":", "device_id", ",", "'details'", ":", "details", "}", ")", ")" ]
folder/get_info http://www.mediafire.com/developers/core_api/1.3/folder/#get_info
[ "folder", "/", "get_info" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L468-L477
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_get_content
def folder_get_content(self, folder_key=None, content_type=None, filter_=None, device_id=None, order_by=None, order_direction=None, chunk=None, details=None, chunk_size=None): """folder/get_content http://www.mediafire.com/developers/core_api/1.3/folder/#get_content """ return self.request('folder/get_content', QueryParams({ 'folder_key': folder_key, 'content_type': content_type, 'filter': filter_, 'device_id': device_id, 'order_by': order_by, 'order_direction': order_direction, 'chunk': chunk, 'details': details, 'chunk_size': chunk_size }))
python
def folder_get_content(self, folder_key=None, content_type=None, filter_=None, device_id=None, order_by=None, order_direction=None, chunk=None, details=None, chunk_size=None): """folder/get_content http://www.mediafire.com/developers/core_api/1.3/folder/#get_content """ return self.request('folder/get_content', QueryParams({ 'folder_key': folder_key, 'content_type': content_type, 'filter': filter_, 'device_id': device_id, 'order_by': order_by, 'order_direction': order_direction, 'chunk': chunk, 'details': details, 'chunk_size': chunk_size }))
[ "def", "folder_get_content", "(", "self", ",", "folder_key", "=", "None", ",", "content_type", "=", "None", ",", "filter_", "=", "None", ",", "device_id", "=", "None", ",", "order_by", "=", "None", ",", "order_direction", "=", "None", ",", "chunk", "=", "None", ",", "details", "=", "None", ",", "chunk_size", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/get_content'", ",", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'content_type'", ":", "content_type", ",", "'filter'", ":", "filter_", ",", "'device_id'", ":", "device_id", ",", "'order_by'", ":", "order_by", ",", "'order_direction'", ":", "order_direction", ",", "'chunk'", ":", "chunk", ",", "'details'", ":", "details", ",", "'chunk_size'", ":", "chunk_size", "}", ")", ")" ]
folder/get_content http://www.mediafire.com/developers/core_api/1.3/folder/#get_content
[ "folder", "/", "get_content" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L479-L497
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_update
def folder_update(self, folder_key, foldername=None, description=None, privacy=None, privacy_recursive=None, mtime=None): """folder/update http://www.mediafire.com/developers/core_api/1.3/folder/#update """ return self.request('folder/update', QueryParams({ 'folder_key': folder_key, 'foldername': foldername, 'description': description, 'privacy': privacy, 'privacy_recursive': privacy_recursive, 'mtime': mtime }))
python
def folder_update(self, folder_key, foldername=None, description=None, privacy=None, privacy_recursive=None, mtime=None): """folder/update http://www.mediafire.com/developers/core_api/1.3/folder/#update """ return self.request('folder/update', QueryParams({ 'folder_key': folder_key, 'foldername': foldername, 'description': description, 'privacy': privacy, 'privacy_recursive': privacy_recursive, 'mtime': mtime }))
[ "def", "folder_update", "(", "self", ",", "folder_key", ",", "foldername", "=", "None", ",", "description", "=", "None", ",", "privacy", "=", "None", ",", "privacy_recursive", "=", "None", ",", "mtime", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/update'", ",", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'foldername'", ":", "foldername", ",", "'description'", ":", "description", ",", "'privacy'", ":", "privacy", ",", "'privacy_recursive'", ":", "privacy_recursive", ",", "'mtime'", ":", "mtime", "}", ")", ")" ]
folder/update http://www.mediafire.com/developers/core_api/1.3/folder/#update
[ "folder", "/", "update" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L499-L512
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_create
def folder_create(self, foldername=None, parent_key=None, action_on_duplicate=None, mtime=None): """folder/create http://www.mediafire.com/developers/core_api/1.3/folder/#create """ return self.request('folder/create', QueryParams({ 'foldername': foldername, 'parent_key': parent_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }))
python
def folder_create(self, foldername=None, parent_key=None, action_on_duplicate=None, mtime=None): """folder/create http://www.mediafire.com/developers/core_api/1.3/folder/#create """ return self.request('folder/create', QueryParams({ 'foldername': foldername, 'parent_key': parent_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }))
[ "def", "folder_create", "(", "self", ",", "foldername", "=", "None", ",", "parent_key", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/create'", ",", "QueryParams", "(", "{", "'foldername'", ":", "foldername", ",", "'parent_key'", ":", "parent_key", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", "}", ")", ")" ]
folder/create http://www.mediafire.com/developers/core_api/1.3/folder/#create
[ "folder", "/", "create" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L514-L525
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_check
def upload_check(self, filename=None, folder_key=None, filedrop_key=None, size=None, hash_=None, path=None, resumable=None): """upload/check http://www.mediafire.com/developers/core_api/1.3/upload/#check """ return self.request('upload/check', QueryParams({ 'filename': filename, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'size': size, 'hash': hash_, 'path': path, 'resumable': resumable }))
python
def upload_check(self, filename=None, folder_key=None, filedrop_key=None, size=None, hash_=None, path=None, resumable=None): """upload/check http://www.mediafire.com/developers/core_api/1.3/upload/#check """ return self.request('upload/check', QueryParams({ 'filename': filename, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'size': size, 'hash': hash_, 'path': path, 'resumable': resumable }))
[ "def", "upload_check", "(", "self", ",", "filename", "=", "None", ",", "folder_key", "=", "None", ",", "filedrop_key", "=", "None", ",", "size", "=", "None", ",", "hash_", "=", "None", ",", "path", "=", "None", ",", "resumable", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'upload/check'", ",", "QueryParams", "(", "{", "'filename'", ":", "filename", ",", "'folder_key'", ":", "folder_key", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'size'", ":", "size", ",", "'hash'", ":", "hash_", ",", "'path'", ":", "path", ",", "'resumable'", ":", "resumable", "}", ")", ")" ]
upload/check http://www.mediafire.com/developers/core_api/1.3/upload/#check
[ "upload", "/", "check" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L555-L569
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_simple
def upload_simple(self, fd, filename, folder_key=None, path=None, filedrop_key=None, action_on_duplicate=None, mtime=None, file_size=None, file_hash=None): """upload/simple http://www.mediafire.com/developers/core_api/1.3/upload/#simple """ action = 'upload/simple' params = QueryParams({ 'folder_key': folder_key, 'path': path, 'filedrop_key': filedrop_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }) headers = QueryParams({ 'X-Filesize': str(file_size), 'X-Filehash': file_hash, 'X-Filename': filename.encode('utf-8') }) upload_info = { "fd": fd, } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
python
def upload_simple(self, fd, filename, folder_key=None, path=None, filedrop_key=None, action_on_duplicate=None, mtime=None, file_size=None, file_hash=None): """upload/simple http://www.mediafire.com/developers/core_api/1.3/upload/#simple """ action = 'upload/simple' params = QueryParams({ 'folder_key': folder_key, 'path': path, 'filedrop_key': filedrop_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }) headers = QueryParams({ 'X-Filesize': str(file_size), 'X-Filehash': file_hash, 'X-Filename': filename.encode('utf-8') }) upload_info = { "fd": fd, } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
[ "def", "upload_simple", "(", "self", ",", "fd", ",", "filename", ",", "folder_key", "=", "None", ",", "path", "=", "None", ",", "filedrop_key", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ",", "file_size", "=", "None", ",", "file_hash", "=", "None", ")", ":", "action", "=", "'upload/simple'", "params", "=", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'path'", ":", "path", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", "}", ")", "headers", "=", "QueryParams", "(", "{", "'X-Filesize'", ":", "str", "(", "file_size", ")", ",", "'X-Filehash'", ":", "file_hash", ",", "'X-Filename'", ":", "filename", ".", "encode", "(", "'utf-8'", ")", "}", ")", "upload_info", "=", "{", "\"fd\"", ":", "fd", ",", "}", "return", "self", ".", "request", "(", "action", ",", "params", ",", "action_token_type", "=", "\"upload\"", ",", "upload_info", "=", "upload_info", ",", "headers", "=", "headers", ")" ]
upload/simple http://www.mediafire.com/developers/core_api/1.3/upload/#simple
[ "upload", "/", "simple" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L571-L599
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_resumable
def upload_resumable(self, fd, filesize, filehash, unit_hash, unit_id, unit_size, quick_key=None, action_on_duplicate=None, mtime=None, version_control=None, folder_key=None, filedrop_key=None, path=None, previous_hash=None): """upload/resumable http://www.mediafire.com/developers/core_api/1.3/upload/#resumable """ action = 'upload/resumable' headers = { 'x-filesize': str(filesize), 'x-filehash': filehash, 'x-unit-hash': unit_hash, 'x-unit-id': str(unit_id), 'x-unit-size': str(unit_size) } params = QueryParams({ 'quick_key': quick_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'previous_hash': previous_hash }) upload_info = { "fd": fd, "filename": "chunk" } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
python
def upload_resumable(self, fd, filesize, filehash, unit_hash, unit_id, unit_size, quick_key=None, action_on_duplicate=None, mtime=None, version_control=None, folder_key=None, filedrop_key=None, path=None, previous_hash=None): """upload/resumable http://www.mediafire.com/developers/core_api/1.3/upload/#resumable """ action = 'upload/resumable' headers = { 'x-filesize': str(filesize), 'x-filehash': filehash, 'x-unit-hash': unit_hash, 'x-unit-id': str(unit_id), 'x-unit-size': str(unit_size) } params = QueryParams({ 'quick_key': quick_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'previous_hash': previous_hash }) upload_info = { "fd": fd, "filename": "chunk" } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
[ "def", "upload_resumable", "(", "self", ",", "fd", ",", "filesize", ",", "filehash", ",", "unit_hash", ",", "unit_id", ",", "unit_size", ",", "quick_key", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ",", "version_control", "=", "None", ",", "folder_key", "=", "None", ",", "filedrop_key", "=", "None", ",", "path", "=", "None", ",", "previous_hash", "=", "None", ")", ":", "action", "=", "'upload/resumable'", "headers", "=", "{", "'x-filesize'", ":", "str", "(", "filesize", ")", ",", "'x-filehash'", ":", "filehash", ",", "'x-unit-hash'", ":", "unit_hash", ",", "'x-unit-id'", ":", "str", "(", "unit_id", ")", ",", "'x-unit-size'", ":", "str", "(", "unit_size", ")", "}", "params", "=", "QueryParams", "(", "{", "'quick_key'", ":", "quick_key", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", ",", "'version_control'", ":", "version_control", ",", "'folder_key'", ":", "folder_key", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'path'", ":", "path", ",", "'previous_hash'", ":", "previous_hash", "}", ")", "upload_info", "=", "{", "\"fd\"", ":", "fd", ",", "\"filename\"", ":", "\"chunk\"", "}", "return", "self", ".", "request", "(", "action", ",", "params", ",", "action_token_type", "=", "\"upload\"", ",", "upload_info", "=", "upload_info", ",", "headers", "=", "headers", ")" ]
upload/resumable http://www.mediafire.com/developers/core_api/1.3/upload/#resumable
[ "upload", "/", "resumable" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L603-L638
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_instant
def upload_instant(self, filename, size, hash_, quick_key=None, folder_key=None, filedrop_key=None, path=None, action_on_duplicate=None, mtime=None, version_control=None, previous_hash=None): """upload/instant http://www.mediafire.com/developers/core_api/1.3/upload/#instant """ return self.request('upload/instant', QueryParams({ 'filename': filename, 'size': size, 'hash': hash_, 'quick_key': quick_key, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'previous_hash': previous_hash }))
python
def upload_instant(self, filename, size, hash_, quick_key=None, folder_key=None, filedrop_key=None, path=None, action_on_duplicate=None, mtime=None, version_control=None, previous_hash=None): """upload/instant http://www.mediafire.com/developers/core_api/1.3/upload/#instant """ return self.request('upload/instant', QueryParams({ 'filename': filename, 'size': size, 'hash': hash_, 'quick_key': quick_key, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'previous_hash': previous_hash }))
[ "def", "upload_instant", "(", "self", ",", "filename", ",", "size", ",", "hash_", ",", "quick_key", "=", "None", ",", "folder_key", "=", "None", ",", "filedrop_key", "=", "None", ",", "path", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ",", "version_control", "=", "None", ",", "previous_hash", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'upload/instant'", ",", "QueryParams", "(", "{", "'filename'", ":", "filename", ",", "'size'", ":", "size", ",", "'hash'", ":", "hash_", ",", "'quick_key'", ":", "quick_key", ",", "'folder_key'", ":", "folder_key", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'path'", ":", "path", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", ",", "'version_control'", ":", "version_control", ",", "'previous_hash'", ":", "previous_hash", "}", ")", ")" ]
upload/instant http://www.mediafire.com/developers/core_api/1.3/upload/#instant
[ "upload", "/", "instant" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L641-L661
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.file_update
def file_update(self, quick_key, filename=None, description=None, mtime=None, privacy=None): """file/update http://www.mediafire.com/developers/core_api/1.3/file/#update """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy }))
python
def file_update(self, quick_key, filename=None, description=None, mtime=None, privacy=None): """file/update http://www.mediafire.com/developers/core_api/1.3/file/#update """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy }))
[ "def", "file_update", "(", "self", ",", "quick_key", ",", "filename", "=", "None", ",", "description", "=", "None", ",", "mtime", "=", "None", ",", "privacy", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'file/update'", ",", "QueryParams", "(", "{", "'quick_key'", ":", "quick_key", ",", "'filename'", ":", "filename", ",", "'description'", ":", "description", ",", "'mtime'", ":", "mtime", ",", "'privacy'", ":", "privacy", "}", ")", ")" ]
file/update http://www.mediafire.com/developers/core_api/1.3/file/#update
[ "file", "/", "update" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L691-L703
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.file_update_file
def file_update_file(self, quick_key, file_extension=None, filename=None, description=None, mtime=None, privacy=None, timezone=None): """file/update_file http://www.mediafire.com/developers/core_api/1.3/file/#update_file """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'file_extension': file_extension, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy, 'timezone': timezone }))
python
def file_update_file(self, quick_key, file_extension=None, filename=None, description=None, mtime=None, privacy=None, timezone=None): """file/update_file http://www.mediafire.com/developers/core_api/1.3/file/#update_file """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'file_extension': file_extension, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy, 'timezone': timezone }))
[ "def", "file_update_file", "(", "self", ",", "quick_key", ",", "file_extension", "=", "None", ",", "filename", "=", "None", ",", "description", "=", "None", ",", "mtime", "=", "None", ",", "privacy", "=", "None", ",", "timezone", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'file/update'", ",", "QueryParams", "(", "{", "'quick_key'", ":", "quick_key", ",", "'file_extension'", ":", "file_extension", ",", "'filename'", ":", "filename", ",", "'description'", ":", "description", ",", "'mtime'", ":", "mtime", ",", "'privacy'", ":", "privacy", ",", "'timezone'", ":", "timezone", "}", ")", ")" ]
file/update_file http://www.mediafire.com/developers/core_api/1.3/file/#update_file
[ "file", "/", "update_file" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L705-L720
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.file_zip
def file_zip(self, keys, confirm_download=None, meta_only=None): """file/zip http://www.mediafire.com/developers/core_api/1.3/file/#zip """ return self.request('file/zip', QueryParams({ 'keys': keys, 'confirm_download': confirm_download, 'meta_only': meta_only }))
python
def file_zip(self, keys, confirm_download=None, meta_only=None): """file/zip http://www.mediafire.com/developers/core_api/1.3/file/#zip """ return self.request('file/zip', QueryParams({ 'keys': keys, 'confirm_download': confirm_download, 'meta_only': meta_only }))
[ "def", "file_zip", "(", "self", ",", "keys", ",", "confirm_download", "=", "None", ",", "meta_only", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'file/zip'", ",", "QueryParams", "(", "{", "'keys'", ":", "keys", ",", "'confirm_download'", ":", "confirm_download", ",", "'meta_only'", ":", "meta_only", "}", ")", ")" ]
file/zip http://www.mediafire.com/developers/core_api/1.3/file/#zip
[ "file", "/", "zip" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L750-L759
dlecocq/nsq-py
nsq/connection.py
Connection._reset
def _reset(self): '''Reset all of our stateful variables''' self._socket = None # The pending messages we have to send, and the current buffer we're # sending self._pending = deque() self._out_buffer = '' # Our read buffer self._buffer = '' # The identify response we last received from the server self._identify_response = {} # Our ready state self.last_ready_sent = 0 self.ready = 0
python
def _reset(self): '''Reset all of our stateful variables''' self._socket = None # The pending messages we have to send, and the current buffer we're # sending self._pending = deque() self._out_buffer = '' # Our read buffer self._buffer = '' # The identify response we last received from the server self._identify_response = {} # Our ready state self.last_ready_sent = 0 self.ready = 0
[ "def", "_reset", "(", "self", ")", ":", "self", ".", "_socket", "=", "None", "# The pending messages we have to send, and the current buffer we're", "# sending", "self", ".", "_pending", "=", "deque", "(", ")", "self", ".", "_out_buffer", "=", "''", "# Our read buffer", "self", ".", "_buffer", "=", "''", "# The identify response we last received from the server", "self", ".", "_identify_response", "=", "{", "}", "# Our ready state", "self", ".", "last_ready_sent", "=", "0", "self", ".", "ready", "=", "0" ]
Reset all of our stateful variables
[ "Reset", "all", "of", "our", "stateful", "variables" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L91-L104
dlecocq/nsq-py
nsq/connection.py
Connection.connect
def connect(self, force=False): '''Establish a connection''' # Don't re-establish existing connections if not force and self.alive(): return True self._reset() # Otherwise, try to connect with self._socket_lock: try: logger.info('Creating socket...') self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.settimeout(self._timeout) logger.info('Connecting to %s, %s', self.host, self.port) self._socket.connect((self.host, self.port)) # Set our socket's blocking state to whatever ours is self._socket.setblocking(self._blocking) # Safely write our magic self._pending.append(constants.MAGIC_V2) while self.pending(): self.flush() # And send our identify command self.identify(self._identify_options) while self.pending(): self.flush() self._reconnnection_counter.success() # Wait until we've gotten a response to IDENTIFY, try to read # one. Also, only spend up to the provided timeout waiting to # establish the connection. limit = time.time() + self._timeout responses = self._read(1) while (not responses) and (time.time() < limit): responses = self._read(1) if not responses: raise ConnectionTimeoutException( 'Read identify response timed out (%ss)' % self._timeout) self.identified(responses[0]) return True except: logger.exception('Failed to connect') if self._socket: self._socket.close() self._reconnnection_counter.failed() self._reset() return False
python
def connect(self, force=False): '''Establish a connection''' # Don't re-establish existing connections if not force and self.alive(): return True self._reset() # Otherwise, try to connect with self._socket_lock: try: logger.info('Creating socket...') self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.settimeout(self._timeout) logger.info('Connecting to %s, %s', self.host, self.port) self._socket.connect((self.host, self.port)) # Set our socket's blocking state to whatever ours is self._socket.setblocking(self._blocking) # Safely write our magic self._pending.append(constants.MAGIC_V2) while self.pending(): self.flush() # And send our identify command self.identify(self._identify_options) while self.pending(): self.flush() self._reconnnection_counter.success() # Wait until we've gotten a response to IDENTIFY, try to read # one. Also, only spend up to the provided timeout waiting to # establish the connection. limit = time.time() + self._timeout responses = self._read(1) while (not responses) and (time.time() < limit): responses = self._read(1) if not responses: raise ConnectionTimeoutException( 'Read identify response timed out (%ss)' % self._timeout) self.identified(responses[0]) return True except: logger.exception('Failed to connect') if self._socket: self._socket.close() self._reconnnection_counter.failed() self._reset() return False
[ "def", "connect", "(", "self", ",", "force", "=", "False", ")", ":", "# Don't re-establish existing connections", "if", "not", "force", "and", "self", ".", "alive", "(", ")", ":", "return", "True", "self", ".", "_reset", "(", ")", "# Otherwise, try to connect", "with", "self", ".", "_socket_lock", ":", "try", ":", "logger", ".", "info", "(", "'Creating socket...'", ")", "self", ".", "_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "self", ".", "_socket", ".", "settimeout", "(", "self", ".", "_timeout", ")", "logger", ".", "info", "(", "'Connecting to %s, %s'", ",", "self", ".", "host", ",", "self", ".", "port", ")", "self", ".", "_socket", ".", "connect", "(", "(", "self", ".", "host", ",", "self", ".", "port", ")", ")", "# Set our socket's blocking state to whatever ours is", "self", ".", "_socket", ".", "setblocking", "(", "self", ".", "_blocking", ")", "# Safely write our magic", "self", ".", "_pending", ".", "append", "(", "constants", ".", "MAGIC_V2", ")", "while", "self", ".", "pending", "(", ")", ":", "self", ".", "flush", "(", ")", "# And send our identify command", "self", ".", "identify", "(", "self", ".", "_identify_options", ")", "while", "self", ".", "pending", "(", ")", ":", "self", ".", "flush", "(", ")", "self", ".", "_reconnnection_counter", ".", "success", "(", ")", "# Wait until we've gotten a response to IDENTIFY, try to read", "# one. 
Also, only spend up to the provided timeout waiting to", "# establish the connection.", "limit", "=", "time", ".", "time", "(", ")", "+", "self", ".", "_timeout", "responses", "=", "self", ".", "_read", "(", "1", ")", "while", "(", "not", "responses", ")", "and", "(", "time", ".", "time", "(", ")", "<", "limit", ")", ":", "responses", "=", "self", ".", "_read", "(", "1", ")", "if", "not", "responses", ":", "raise", "ConnectionTimeoutException", "(", "'Read identify response timed out (%ss)'", "%", "self", ".", "_timeout", ")", "self", ".", "identified", "(", "responses", "[", "0", "]", ")", "return", "True", "except", ":", "logger", ".", "exception", "(", "'Failed to connect'", ")", "if", "self", ".", "_socket", ":", "self", ".", "_socket", ".", "close", "(", ")", "self", ".", "_reconnnection_counter", ".", "failed", "(", ")", "self", ".", "_reset", "(", ")", "return", "False" ]
Establish a connection
[ "Establish", "a", "connection" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L106-L151
dlecocq/nsq-py
nsq/connection.py
Connection.close
def close(self): '''Close our connection''' # Flush any unsent message try: while self.pending(): self.flush() except socket.error: pass with self._socket_lock: try: if self._socket: self._socket.close() finally: self._reset()
python
def close(self): '''Close our connection''' # Flush any unsent message try: while self.pending(): self.flush() except socket.error: pass with self._socket_lock: try: if self._socket: self._socket.close() finally: self._reset()
[ "def", "close", "(", "self", ")", ":", "# Flush any unsent message", "try", ":", "while", "self", ".", "pending", "(", ")", ":", "self", ".", "flush", "(", ")", "except", "socket", ".", "error", ":", "pass", "with", "self", ".", "_socket_lock", ":", "try", ":", "if", "self", ".", "_socket", ":", "self", ".", "_socket", ".", "close", "(", ")", "finally", ":", "self", ".", "_reset", "(", ")" ]
Close our connection
[ "Close", "our", "connection" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L153-L166
dlecocq/nsq-py
nsq/connection.py
Connection.socket
def socket(self, blocking=True): '''Blockingly yield the socket''' # If the socket is available, then yield it. Otherwise, yield nothing if self._socket_lock.acquire(blocking): try: yield self._socket finally: self._socket_lock.release()
python
def socket(self, blocking=True): '''Blockingly yield the socket''' # If the socket is available, then yield it. Otherwise, yield nothing if self._socket_lock.acquire(blocking): try: yield self._socket finally: self._socket_lock.release()
[ "def", "socket", "(", "self", ",", "blocking", "=", "True", ")", ":", "# If the socket is available, then yield it. Otherwise, yield nothing", "if", "self", ".", "_socket_lock", ".", "acquire", "(", "blocking", ")", ":", "try", ":", "yield", "self", ".", "_socket", "finally", ":", "self", ".", "_socket_lock", ".", "release", "(", ")" ]
Blockingly yield the socket
[ "Blockingly", "yield", "the", "socket" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L168-L175
dlecocq/nsq-py
nsq/connection.py
Connection.identified
def identified(self, res): '''Handle a response to our 'identify' command. Returns response''' # If they support it, they should give us a JSON blob which we should # inspect. try: res.data = json.loads(res.data) self._identify_response = res.data logger.info('Got identify response: %s', res.data) except: logger.warn('Server does not support feature negotiation') self._identify_response = {} # Save our max ready count unless it's not provided self.max_rdy_count = self._identify_response.get( 'max_rdy_count', self.max_rdy_count) if self._identify_options.get('tls_v1', False): if not self._identify_response.get('tls_v1', False): raise UnsupportedException( 'NSQd instance does not support TLS') else: self._socket = TLSSocket.wrap_socket(self._socket) # Now is the appropriate time to send auth if self._identify_response.get('auth_required', False): if not self._auth_secret: raise UnsupportedException( 'Auth required but not provided') else: self.auth(self._auth_secret) # If we're not talking over TLS, warn the user if not self._identify_response.get('tls_v1', False): logger.warn('Using AUTH without TLS') elif self._auth_secret: logger.warn('Authentication secret provided but not required') return res
python
def identified(self, res): '''Handle a response to our 'identify' command. Returns response''' # If they support it, they should give us a JSON blob which we should # inspect. try: res.data = json.loads(res.data) self._identify_response = res.data logger.info('Got identify response: %s', res.data) except: logger.warn('Server does not support feature negotiation') self._identify_response = {} # Save our max ready count unless it's not provided self.max_rdy_count = self._identify_response.get( 'max_rdy_count', self.max_rdy_count) if self._identify_options.get('tls_v1', False): if not self._identify_response.get('tls_v1', False): raise UnsupportedException( 'NSQd instance does not support TLS') else: self._socket = TLSSocket.wrap_socket(self._socket) # Now is the appropriate time to send auth if self._identify_response.get('auth_required', False): if not self._auth_secret: raise UnsupportedException( 'Auth required but not provided') else: self.auth(self._auth_secret) # If we're not talking over TLS, warn the user if not self._identify_response.get('tls_v1', False): logger.warn('Using AUTH without TLS') elif self._auth_secret: logger.warn('Authentication secret provided but not required') return res
[ "def", "identified", "(", "self", ",", "res", ")", ":", "# If they support it, they should give us a JSON blob which we should", "# inspect.", "try", ":", "res", ".", "data", "=", "json", ".", "loads", "(", "res", ".", "data", ")", "self", ".", "_identify_response", "=", "res", ".", "data", "logger", ".", "info", "(", "'Got identify response: %s'", ",", "res", ".", "data", ")", "except", ":", "logger", ".", "warn", "(", "'Server does not support feature negotiation'", ")", "self", ".", "_identify_response", "=", "{", "}", "# Save our max ready count unless it's not provided", "self", ".", "max_rdy_count", "=", "self", ".", "_identify_response", ".", "get", "(", "'max_rdy_count'", ",", "self", ".", "max_rdy_count", ")", "if", "self", ".", "_identify_options", ".", "get", "(", "'tls_v1'", ",", "False", ")", ":", "if", "not", "self", ".", "_identify_response", ".", "get", "(", "'tls_v1'", ",", "False", ")", ":", "raise", "UnsupportedException", "(", "'NSQd instance does not support TLS'", ")", "else", ":", "self", ".", "_socket", "=", "TLSSocket", ".", "wrap_socket", "(", "self", ".", "_socket", ")", "# Now is the appropriate time to send auth", "if", "self", ".", "_identify_response", ".", "get", "(", "'auth_required'", ",", "False", ")", ":", "if", "not", "self", ".", "_auth_secret", ":", "raise", "UnsupportedException", "(", "'Auth required but not provided'", ")", "else", ":", "self", ".", "auth", "(", "self", ".", "_auth_secret", ")", "# If we're not talking over TLS, warn the user", "if", "not", "self", ".", "_identify_response", ".", "get", "(", "'tls_v1'", ",", "False", ")", ":", "logger", ".", "warn", "(", "'Using AUTH without TLS'", ")", "elif", "self", ".", "_auth_secret", ":", "logger", ".", "warn", "(", "'Authentication secret provided but not required'", ")", "return", "res" ]
Handle a response to our 'identify' command. Returns response
[ "Handle", "a", "response", "to", "our", "identify", "command", ".", "Returns", "response" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L177-L211
dlecocq/nsq-py
nsq/connection.py
Connection.setblocking
def setblocking(self, blocking): '''Set whether or not this message is blocking''' for sock in self.socket(): sock.setblocking(blocking) self._blocking = blocking
python
def setblocking(self, blocking): '''Set whether or not this message is blocking''' for sock in self.socket(): sock.setblocking(blocking) self._blocking = blocking
[ "def", "setblocking", "(", "self", ",", "blocking", ")", ":", "for", "sock", "in", "self", ".", "socket", "(", ")", ":", "sock", ".", "setblocking", "(", "blocking", ")", "self", ".", "_blocking", "=", "blocking" ]
Set whether or not this message is blocking
[ "Set", "whether", "or", "not", "this", "message", "is", "blocking" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L217-L221
dlecocq/nsq-py
nsq/connection.py
Connection.flush
def flush(self): '''Flush some of the waiting messages, returns count written''' # When profiling, we found that while there was some efficiency to be # gained elsewhere, the big performance hit is sending lots of small # messages at a time. In particular, consumers send many 'FIN' messages # which are very small indeed and the cost of dispatching so many system # calls is very high. Instead, we prefer to glom together many messages # into a single string to send at once. total = 0 for sock in self.socket(blocking=False): # If there's nothing left in the out buffer, take whatever's in the # pending queue. # # When using SSL, if the socket throws 'SSL_WANT_WRITE', then the # subsequent send requests have to send the same buffer. pending = self._pending data = self._out_buffer or ''.join( pending.popleft() for _ in xrange(len(pending))) try: # Try to send as much of the first message as possible total = sock.send(data) except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] not in self.WOULD_BLOCK_ERRS: raise self._out_buffer = data else: self._out_buffer = None finally: if total < len(data): # Save the rest of the message that could not be sent self._pending.appendleft(data[total:]) return total
python
def flush(self): '''Flush some of the waiting messages, returns count written''' # When profiling, we found that while there was some efficiency to be # gained elsewhere, the big performance hit is sending lots of small # messages at a time. In particular, consumers send many 'FIN' messages # which are very small indeed and the cost of dispatching so many system # calls is very high. Instead, we prefer to glom together many messages # into a single string to send at once. total = 0 for sock in self.socket(blocking=False): # If there's nothing left in the out buffer, take whatever's in the # pending queue. # # When using SSL, if the socket throws 'SSL_WANT_WRITE', then the # subsequent send requests have to send the same buffer. pending = self._pending data = self._out_buffer or ''.join( pending.popleft() for _ in xrange(len(pending))) try: # Try to send as much of the first message as possible total = sock.send(data) except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] not in self.WOULD_BLOCK_ERRS: raise self._out_buffer = data else: self._out_buffer = None finally: if total < len(data): # Save the rest of the message that could not be sent self._pending.appendleft(data[total:]) return total
[ "def", "flush", "(", "self", ")", ":", "# When profiling, we found that while there was some efficiency to be", "# gained elsewhere, the big performance hit is sending lots of small", "# messages at a time. In particular, consumers send many 'FIN' messages", "# which are very small indeed and the cost of dispatching so many system", "# calls is very high. Instead, we prefer to glom together many messages", "# into a single string to send at once.", "total", "=", "0", "for", "sock", "in", "self", ".", "socket", "(", "blocking", "=", "False", ")", ":", "# If there's nothing left in the out buffer, take whatever's in the", "# pending queue.", "#", "# When using SSL, if the socket throws 'SSL_WANT_WRITE', then the", "# subsequent send requests have to send the same buffer.", "pending", "=", "self", ".", "_pending", "data", "=", "self", ".", "_out_buffer", "or", "''", ".", "join", "(", "pending", ".", "popleft", "(", ")", "for", "_", "in", "xrange", "(", "len", "(", "pending", ")", ")", ")", "try", ":", "# Try to send as much of the first message as possible", "total", "=", "sock", ".", "send", "(", "data", ")", "except", "socket", ".", "error", "as", "exc", ":", "# Catch (errno, message)-type socket.errors", "if", "exc", ".", "args", "[", "0", "]", "not", "in", "self", ".", "WOULD_BLOCK_ERRS", ":", "raise", "self", ".", "_out_buffer", "=", "data", "else", ":", "self", ".", "_out_buffer", "=", "None", "finally", ":", "if", "total", "<", "len", "(", "data", ")", ":", "# Save the rest of the message that could not be sent", "self", ".", "_pending", ".", "appendleft", "(", "data", "[", "total", ":", "]", ")", "return", "total" ]
Flush some of the waiting messages, returns count written
[ "Flush", "some", "of", "the", "waiting", "messages", "returns", "count", "written" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L234-L266
dlecocq/nsq-py
nsq/connection.py
Connection.send
def send(self, command, message=None): '''Send a command over the socket with length endcoded''' if message: joined = command + constants.NL + util.pack(message) else: joined = command + constants.NL if self._blocking: for sock in self.socket(): sock.sendall(joined) else: self._pending.append(joined)
python
def send(self, command, message=None): '''Send a command over the socket with length endcoded''' if message: joined = command + constants.NL + util.pack(message) else: joined = command + constants.NL if self._blocking: for sock in self.socket(): sock.sendall(joined) else: self._pending.append(joined)
[ "def", "send", "(", "self", ",", "command", ",", "message", "=", "None", ")", ":", "if", "message", ":", "joined", "=", "command", "+", "constants", ".", "NL", "+", "util", ".", "pack", "(", "message", ")", "else", ":", "joined", "=", "command", "+", "constants", ".", "NL", "if", "self", ".", "_blocking", ":", "for", "sock", "in", "self", ".", "socket", "(", ")", ":", "sock", ".", "sendall", "(", "joined", ")", "else", ":", "self", ".", "_pending", ".", "append", "(", "joined", ")" ]
Send a command over the socket with length endcoded
[ "Send", "a", "command", "over", "the", "socket", "with", "length", "endcoded" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L268-L278
dlecocq/nsq-py
nsq/connection.py
Connection.identify
def identify(self, data): '''Send an identification message''' return self.send(constants.IDENTIFY, json.dumps(data))
python
def identify(self, data): '''Send an identification message''' return self.send(constants.IDENTIFY, json.dumps(data))
[ "def", "identify", "(", "self", ",", "data", ")", ":", "return", "self", ".", "send", "(", "constants", ".", "IDENTIFY", ",", "json", ".", "dumps", "(", "data", ")", ")" ]
Send an identification message
[ "Send", "an", "identification", "message" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L280-L282
dlecocq/nsq-py
nsq/connection.py
Connection.sub
def sub(self, topic, channel): '''Subscribe to a topic/channel''' return self.send(' '.join((constants.SUB, topic, channel)))
python
def sub(self, topic, channel): '''Subscribe to a topic/channel''' return self.send(' '.join((constants.SUB, topic, channel)))
[ "def", "sub", "(", "self", ",", "topic", ",", "channel", ")", ":", "return", "self", ".", "send", "(", "' '", ".", "join", "(", "(", "constants", ".", "SUB", ",", "topic", ",", "channel", ")", ")", ")" ]
Subscribe to a topic/channel
[ "Subscribe", "to", "a", "topic", "/", "channel" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L288-L290
dlecocq/nsq-py
nsq/connection.py
Connection.pub
def pub(self, topic, message): '''Publish to a topic''' return self.send(' '.join((constants.PUB, topic)), message)
python
def pub(self, topic, message): '''Publish to a topic''' return self.send(' '.join((constants.PUB, topic)), message)
[ "def", "pub", "(", "self", ",", "topic", ",", "message", ")", ":", "return", "self", ".", "send", "(", "' '", ".", "join", "(", "(", "constants", ".", "PUB", ",", "topic", ")", ")", ",", "message", ")" ]
Publish to a topic
[ "Publish", "to", "a", "topic" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L292-L294
dlecocq/nsq-py
nsq/connection.py
Connection.mpub
def mpub(self, topic, *messages): '''Publish multiple messages to a topic''' return self.send(constants.MPUB + ' ' + topic, messages)
python
def mpub(self, topic, *messages): '''Publish multiple messages to a topic''' return self.send(constants.MPUB + ' ' + topic, messages)
[ "def", "mpub", "(", "self", ",", "topic", ",", "*", "messages", ")", ":", "return", "self", ".", "send", "(", "constants", ".", "MPUB", "+", "' '", "+", "topic", ",", "messages", ")" ]
Publish multiple messages to a topic
[ "Publish", "multiple", "messages", "to", "a", "topic" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L296-L298
dlecocq/nsq-py
nsq/connection.py
Connection.rdy
def rdy(self, count): '''Indicate that you're ready to receive''' self.ready = count self.last_ready_sent = count return self.send(constants.RDY + ' ' + str(count))
python
def rdy(self, count): '''Indicate that you're ready to receive''' self.ready = count self.last_ready_sent = count return self.send(constants.RDY + ' ' + str(count))
[ "def", "rdy", "(", "self", ",", "count", ")", ":", "self", ".", "ready", "=", "count", "self", ".", "last_ready_sent", "=", "count", "return", "self", ".", "send", "(", "constants", ".", "RDY", "+", "' '", "+", "str", "(", "count", ")", ")" ]
Indicate that you're ready to receive
[ "Indicate", "that", "you", "re", "ready", "to", "receive" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L300-L304
dlecocq/nsq-py
nsq/connection.py
Connection.req
def req(self, message_id, timeout): '''Re-queue a message''' return self.send(constants.REQ + ' ' + message_id + ' ' + str(timeout))
python
def req(self, message_id, timeout): '''Re-queue a message''' return self.send(constants.REQ + ' ' + message_id + ' ' + str(timeout))
[ "def", "req", "(", "self", ",", "message_id", ",", "timeout", ")", ":", "return", "self", ".", "send", "(", "constants", ".", "REQ", "+", "' '", "+", "message_id", "+", "' '", "+", "str", "(", "timeout", ")", ")" ]
Re-queue a message
[ "Re", "-", "queue", "a", "message" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L310-L312
dlecocq/nsq-py
nsq/connection.py
Connection._read
def _read(self, limit=1000): '''Return all the responses read''' # It's important to know that it may return no responses or multiple # responses. It depends on how the buffering works out. First, read from # the socket for sock in self.socket(): if sock is None: # Race condition. Connection has been closed. return [] try: packet = sock.recv(4096) except socket.timeout: # If the socket times out, return nothing return [] except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] in self.WOULD_BLOCK_ERRS: return [] else: raise # Append our newly-read data to our buffer self._buffer += packet responses = [] total = 0 buf = self._buffer remaining = len(buf) while limit and (remaining >= 4): size = struct.unpack('>l', buf[total:(total + 4)])[0] # Now check to see if there's enough left in the buffer to read # the message. if (remaining - 4) >= size: responses.append(Response.from_raw( self, buf[(total + 4):(total + size + 4)])) total += (size + 4) remaining -= (size + 4) limit -= 1 else: break self._buffer = self._buffer[total:] return responses
python
def _read(self, limit=1000): '''Return all the responses read''' # It's important to know that it may return no responses or multiple # responses. It depends on how the buffering works out. First, read from # the socket for sock in self.socket(): if sock is None: # Race condition. Connection has been closed. return [] try: packet = sock.recv(4096) except socket.timeout: # If the socket times out, return nothing return [] except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] in self.WOULD_BLOCK_ERRS: return [] else: raise # Append our newly-read data to our buffer self._buffer += packet responses = [] total = 0 buf = self._buffer remaining = len(buf) while limit and (remaining >= 4): size = struct.unpack('>l', buf[total:(total + 4)])[0] # Now check to see if there's enough left in the buffer to read # the message. if (remaining - 4) >= size: responses.append(Response.from_raw( self, buf[(total + 4):(total + size + 4)])) total += (size + 4) remaining -= (size + 4) limit -= 1 else: break self._buffer = self._buffer[total:] return responses
[ "def", "_read", "(", "self", ",", "limit", "=", "1000", ")", ":", "# It's important to know that it may return no responses or multiple", "# responses. It depends on how the buffering works out. First, read from", "# the socket", "for", "sock", "in", "self", ".", "socket", "(", ")", ":", "if", "sock", "is", "None", ":", "# Race condition. Connection has been closed.", "return", "[", "]", "try", ":", "packet", "=", "sock", ".", "recv", "(", "4096", ")", "except", "socket", ".", "timeout", ":", "# If the socket times out, return nothing", "return", "[", "]", "except", "socket", ".", "error", "as", "exc", ":", "# Catch (errno, message)-type socket.errors", "if", "exc", ".", "args", "[", "0", "]", "in", "self", ".", "WOULD_BLOCK_ERRS", ":", "return", "[", "]", "else", ":", "raise", "# Append our newly-read data to our buffer", "self", ".", "_buffer", "+=", "packet", "responses", "=", "[", "]", "total", "=", "0", "buf", "=", "self", ".", "_buffer", "remaining", "=", "len", "(", "buf", ")", "while", "limit", "and", "(", "remaining", ">=", "4", ")", ":", "size", "=", "struct", ".", "unpack", "(", "'>l'", ",", "buf", "[", "total", ":", "(", "total", "+", "4", ")", "]", ")", "[", "0", "]", "# Now check to see if there's enough left in the buffer to read", "# the message.", "if", "(", "remaining", "-", "4", ")", ">=", "size", ":", "responses", ".", "append", "(", "Response", ".", "from_raw", "(", "self", ",", "buf", "[", "(", "total", "+", "4", ")", ":", "(", "total", "+", "size", "+", "4", ")", "]", ")", ")", "total", "+=", "(", "size", "+", "4", ")", "remaining", "-=", "(", "size", "+", "4", ")", "limit", "-=", "1", "else", ":", "break", "self", ".", "_buffer", "=", "self", ".", "_buffer", "[", "total", ":", "]", "return", "responses" ]
Return all the responses read
[ "Return", "all", "the", "responses", "read" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L330-L371
dlecocq/nsq-py
nsq/connection.py
Connection.read
def read(self): '''Responses from an established socket''' responses = self._read() # Determine the number of messages in here and decrement our ready # count appropriately self.ready -= sum( map(int, (r.frame_type == Message.FRAME_TYPE for r in responses))) return responses
python
def read(self): '''Responses from an established socket''' responses = self._read() # Determine the number of messages in here and decrement our ready # count appropriately self.ready -= sum( map(int, (r.frame_type == Message.FRAME_TYPE for r in responses))) return responses
[ "def", "read", "(", "self", ")", ":", "responses", "=", "self", ".", "_read", "(", ")", "# Determine the number of messages in here and decrement our ready", "# count appropriately", "self", ".", "ready", "-=", "sum", "(", "map", "(", "int", ",", "(", "r", ".", "frame_type", "==", "Message", ".", "FRAME_TYPE", "for", "r", "in", "responses", ")", ")", ")", "return", "responses" ]
Responses from an established socket
[ "Responses", "from", "an", "established", "socket" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L373-L380
dlecocq/nsq-py
nsq/client.py
Client.discover
def discover(self, topic): '''Run the discovery mechanism''' logger.info('Discovering on topic %s', topic) producers = [] for lookupd in self._lookupd: logger.info('Discovering on %s', lookupd) try: # Find all the current producers on this instance for producer in lookupd.lookup(topic)['producers']: logger.info('Found producer %s on %s', producer, lookupd) producers.append( (producer['broadcast_address'], producer['tcp_port'])) except ClientException: logger.exception('Failed to query %s', lookupd) new = [] for host, port in producers: conn = self._connections.get((host, port)) if not conn: logger.info('Discovered %s:%s', host, port) new.append(self.connect(host, port)) elif not conn.alive(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Connection to %s:%s still alive', host, port) # And return all the new connections return [conn for conn in new if conn]
python
def discover(self, topic): '''Run the discovery mechanism''' logger.info('Discovering on topic %s', topic) producers = [] for lookupd in self._lookupd: logger.info('Discovering on %s', lookupd) try: # Find all the current producers on this instance for producer in lookupd.lookup(topic)['producers']: logger.info('Found producer %s on %s', producer, lookupd) producers.append( (producer['broadcast_address'], producer['tcp_port'])) except ClientException: logger.exception('Failed to query %s', lookupd) new = [] for host, port in producers: conn = self._connections.get((host, port)) if not conn: logger.info('Discovered %s:%s', host, port) new.append(self.connect(host, port)) elif not conn.alive(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Connection to %s:%s still alive', host, port) # And return all the new connections return [conn for conn in new if conn]
[ "def", "discover", "(", "self", ",", "topic", ")", ":", "logger", ".", "info", "(", "'Discovering on topic %s'", ",", "topic", ")", "producers", "=", "[", "]", "for", "lookupd", "in", "self", ".", "_lookupd", ":", "logger", ".", "info", "(", "'Discovering on %s'", ",", "lookupd", ")", "try", ":", "# Find all the current producers on this instance", "for", "producer", "in", "lookupd", ".", "lookup", "(", "topic", ")", "[", "'producers'", "]", ":", "logger", ".", "info", "(", "'Found producer %s on %s'", ",", "producer", ",", "lookupd", ")", "producers", ".", "append", "(", "(", "producer", "[", "'broadcast_address'", "]", ",", "producer", "[", "'tcp_port'", "]", ")", ")", "except", "ClientException", ":", "logger", ".", "exception", "(", "'Failed to query %s'", ",", "lookupd", ")", "new", "=", "[", "]", "for", "host", ",", "port", "in", "producers", ":", "conn", "=", "self", ".", "_connections", ".", "get", "(", "(", "host", ",", "port", ")", ")", "if", "not", "conn", ":", "logger", ".", "info", "(", "'Discovered %s:%s'", ",", "host", ",", "port", ")", "new", ".", "append", "(", "self", ".", "connect", "(", "host", ",", "port", ")", ")", "elif", "not", "conn", ".", "alive", "(", ")", ":", "logger", ".", "info", "(", "'Reconnecting to %s:%s'", ",", "host", ",", "port", ")", "if", "conn", ".", "connect", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "reconnected", "(", "conn", ")", "else", ":", "logger", ".", "debug", "(", "'Connection to %s:%s still alive'", ",", "host", ",", "port", ")", "# And return all the new connections", "return", "[", "conn", "for", "conn", "in", "new", "if", "conn", "]" ]
Run the discovery mechanism
[ "Run", "the", "discovery", "mechanism" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L59-L89
dlecocq/nsq-py
nsq/client.py
Client.check_connections
def check_connections(self): '''Connect to all the appropriate instances''' logger.info('Checking connections') if self._lookupd: self.discover(self._topic) # Make sure we're connected to all the prescribed hosts for hostspec in self._nsqd_tcp_addresses: logger.debug('Checking nsqd instance %s', hostspec) host, port = hostspec.split(':') port = int(port) conn = self._connections.get((host, port), None) # If there is no connection to it, we have to try to connect if not conn: logger.info('Connecting to %s:%s', host, port) self.connect(host, port) elif not conn.alive(): # If we've connected to it before, but it's no longer alive, # we'll have to make a decision about when to try to reconnect # to it, if we need to reconnect to it at all if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Checking freshness') now = time.time() time_check = math.ceil(now - self.last_recv_timestamp) if time_check >= ((self.heartbeat_interval * 2) / 1000.0): if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn)
python
def check_connections(self): '''Connect to all the appropriate instances''' logger.info('Checking connections') if self._lookupd: self.discover(self._topic) # Make sure we're connected to all the prescribed hosts for hostspec in self._nsqd_tcp_addresses: logger.debug('Checking nsqd instance %s', hostspec) host, port = hostspec.split(':') port = int(port) conn = self._connections.get((host, port), None) # If there is no connection to it, we have to try to connect if not conn: logger.info('Connecting to %s:%s', host, port) self.connect(host, port) elif not conn.alive(): # If we've connected to it before, but it's no longer alive, # we'll have to make a decision about when to try to reconnect # to it, if we need to reconnect to it at all if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Checking freshness') now = time.time() time_check = math.ceil(now - self.last_recv_timestamp) if time_check >= ((self.heartbeat_interval * 2) / 1000.0): if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn)
[ "def", "check_connections", "(", "self", ")", ":", "logger", ".", "info", "(", "'Checking connections'", ")", "if", "self", ".", "_lookupd", ":", "self", ".", "discover", "(", "self", ".", "_topic", ")", "# Make sure we're connected to all the prescribed hosts", "for", "hostspec", "in", "self", ".", "_nsqd_tcp_addresses", ":", "logger", ".", "debug", "(", "'Checking nsqd instance %s'", ",", "hostspec", ")", "host", ",", "port", "=", "hostspec", ".", "split", "(", "':'", ")", "port", "=", "int", "(", "port", ")", "conn", "=", "self", ".", "_connections", ".", "get", "(", "(", "host", ",", "port", ")", ",", "None", ")", "# If there is no connection to it, we have to try to connect", "if", "not", "conn", ":", "logger", ".", "info", "(", "'Connecting to %s:%s'", ",", "host", ",", "port", ")", "self", ".", "connect", "(", "host", ",", "port", ")", "elif", "not", "conn", ".", "alive", "(", ")", ":", "# If we've connected to it before, but it's no longer alive,", "# we'll have to make a decision about when to try to reconnect", "# to it, if we need to reconnect to it at all", "if", "conn", ".", "ready_to_reconnect", "(", ")", ":", "logger", ".", "info", "(", "'Reconnecting to %s:%s'", ",", "host", ",", "port", ")", "if", "conn", ".", "connect", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "reconnected", "(", "conn", ")", "else", ":", "logger", ".", "debug", "(", "'Checking freshness'", ")", "now", "=", "time", ".", "time", "(", ")", "time_check", "=", "math", ".", "ceil", "(", "now", "-", "self", ".", "last_recv_timestamp", ")", "if", "time_check", ">=", "(", "(", "self", ".", "heartbeat_interval", "*", "2", ")", "/", "1000.0", ")", ":", "if", "conn", ".", "ready_to_reconnect", "(", ")", ":", "logger", ".", "info", "(", "'Reconnecting to %s:%s'", ",", "host", ",", "port", ")", "if", "conn", ".", "connect", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "reconnected", "(", "conn", ")" ]
Connect to all the appropriate instances
[ "Connect", "to", "all", "the", "appropriate", "instances" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L91-L125
dlecocq/nsq-py
nsq/client.py
Client.connection_checker
def connection_checker(self): '''Run periodic reconnection checks''' thread = ConnectionChecker(self) logger.info('Starting connection-checker thread') thread.start() try: yield thread finally: logger.info('Stopping connection-checker') thread.stop() logger.info('Joining connection-checker') thread.join()
python
def connection_checker(self): '''Run periodic reconnection checks''' thread = ConnectionChecker(self) logger.info('Starting connection-checker thread') thread.start() try: yield thread finally: logger.info('Stopping connection-checker') thread.stop() logger.info('Joining connection-checker') thread.join()
[ "def", "connection_checker", "(", "self", ")", ":", "thread", "=", "ConnectionChecker", "(", "self", ")", "logger", ".", "info", "(", "'Starting connection-checker thread'", ")", "thread", ".", "start", "(", ")", "try", ":", "yield", "thread", "finally", ":", "logger", ".", "info", "(", "'Stopping connection-checker'", ")", "thread", ".", "stop", "(", ")", "logger", ".", "info", "(", "'Joining connection-checker'", ")", "thread", ".", "join", "(", ")" ]
Run periodic reconnection checks
[ "Run", "periodic", "reconnection", "checks" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L128-L139
dlecocq/nsq-py
nsq/client.py
Client.connect
def connect(self, host, port): '''Connect to the provided host, port''' conn = connection.Connection(host, port, reconnection_backoff=self._reconnection_backoff, auth_secret=self._auth_secret, timeout=self._connect_timeout, **self._identify_options) if conn.alive(): conn.setblocking(0) self.add(conn) return conn
python
def connect(self, host, port): '''Connect to the provided host, port''' conn = connection.Connection(host, port, reconnection_backoff=self._reconnection_backoff, auth_secret=self._auth_secret, timeout=self._connect_timeout, **self._identify_options) if conn.alive(): conn.setblocking(0) self.add(conn) return conn
[ "def", "connect", "(", "self", ",", "host", ",", "port", ")", ":", "conn", "=", "connection", ".", "Connection", "(", "host", ",", "port", ",", "reconnection_backoff", "=", "self", ".", "_reconnection_backoff", ",", "auth_secret", "=", "self", ".", "_auth_secret", ",", "timeout", "=", "self", ".", "_connect_timeout", ",", "*", "*", "self", ".", "_identify_options", ")", "if", "conn", ".", "alive", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "add", "(", "conn", ")", "return", "conn" ]
Connect to the provided host, port
[ "Connect", "to", "the", "provided", "host", "port" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L141-L151
dlecocq/nsq-py
nsq/client.py
Client.add
def add(self, connection): '''Add a connection''' key = (connection.host, connection.port) with self._lock: if key not in self._connections: self._connections[key] = connection self.added(connection) return connection else: return None
python
def add(self, connection): '''Add a connection''' key = (connection.host, connection.port) with self._lock: if key not in self._connections: self._connections[key] = connection self.added(connection) return connection else: return None
[ "def", "add", "(", "self", ",", "connection", ")", ":", "key", "=", "(", "connection", ".", "host", ",", "connection", ".", "port", ")", "with", "self", ".", "_lock", ":", "if", "key", "not", "in", "self", ".", "_connections", ":", "self", ".", "_connections", "[", "key", "]", "=", "connection", "self", ".", "added", "(", "connection", ")", "return", "connection", "else", ":", "return", "None" ]
Add a connection
[ "Add", "a", "connection" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L164-L173
dlecocq/nsq-py
nsq/client.py
Client.remove
def remove(self, connection): '''Remove a connection''' key = (connection.host, connection.port) with self._lock: found = self._connections.pop(key, None) try: self.close_connection(found) except Exception as exc: logger.warn('Failed to close %s: %s', connection, exc) return found
python
def remove(self, connection): '''Remove a connection''' key = (connection.host, connection.port) with self._lock: found = self._connections.pop(key, None) try: self.close_connection(found) except Exception as exc: logger.warn('Failed to close %s: %s', connection, exc) return found
[ "def", "remove", "(", "self", ",", "connection", ")", ":", "key", "=", "(", "connection", ".", "host", ",", "connection", ".", "port", ")", "with", "self", ".", "_lock", ":", "found", "=", "self", ".", "_connections", ".", "pop", "(", "key", ",", "None", ")", "try", ":", "self", ".", "close_connection", "(", "found", ")", "except", "Exception", "as", "exc", ":", "logger", ".", "warn", "(", "'Failed to close %s: %s'", ",", "connection", ",", "exc", ")", "return", "found" ]
Remove a connection
[ "Remove", "a", "connection" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L175-L184
dlecocq/nsq-py
nsq/client.py
Client.read
def read(self): '''Read from any of the connections that need it''' # We'll check all living connections connections = [c for c in self.connections() if c.alive()] if not connections: # If there are no connections, obviously we return no messages, but # we should wait the duration of the timeout time.sleep(self._timeout) return [] # Not all connections need to be written to, so we'll only concern # ourselves with those that require writes writes = [c for c in connections if c.pending()] try: readable, writable, exceptable = select.select( connections, writes, connections, self._timeout) except exceptions.ConnectionClosedException: logger.exception('Tried selecting on closed client') return [] except select.error: logger.exception('Error running select') return [] # If we returned because the timeout interval passed, log it and return if not (readable or writable or exceptable): logger.debug('Timed out...') return [] responses = [] # For each readable socket, we'll try to read some responses for conn in readable: try: for res in conn.read(): # We'll capture heartbeats and respond to them automatically if (isinstance(res, Response) and res.data == HEARTBEAT): logger.info('Sending heartbeat to %s', conn) conn.nop() logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() continue elif isinstance(res, Error): nonfatal = ( exceptions.FinFailedException, exceptions.ReqFailedException, exceptions.TouchFailedException ) if not isinstance(res.exception(), nonfatal): # If it's not any of the non-fatal exceptions, then # we have to close this connection logger.error( 'Closing %s: %s', conn, res.exception()) self.close_connection(conn) responses.append(res) logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() except exceptions.NSQException: logger.exception('Failed to read from %s', conn) self.close_connection(conn) except socket.error: logger.exception('Failed to read from %s', conn) self.close_connection(conn) # For 
each writable socket, flush some data out for conn in writable: try: conn.flush() except socket.error: logger.exception('Failed to flush %s', conn) self.close_connection(conn) # For each connection with an exception, try to close it and remove it # from our connections for conn in exceptable: self.close_connection(conn) return responses
python
def read(self): '''Read from any of the connections that need it''' # We'll check all living connections connections = [c for c in self.connections() if c.alive()] if not connections: # If there are no connections, obviously we return no messages, but # we should wait the duration of the timeout time.sleep(self._timeout) return [] # Not all connections need to be written to, so we'll only concern # ourselves with those that require writes writes = [c for c in connections if c.pending()] try: readable, writable, exceptable = select.select( connections, writes, connections, self._timeout) except exceptions.ConnectionClosedException: logger.exception('Tried selecting on closed client') return [] except select.error: logger.exception('Error running select') return [] # If we returned because the timeout interval passed, log it and return if not (readable or writable or exceptable): logger.debug('Timed out...') return [] responses = [] # For each readable socket, we'll try to read some responses for conn in readable: try: for res in conn.read(): # We'll capture heartbeats and respond to them automatically if (isinstance(res, Response) and res.data == HEARTBEAT): logger.info('Sending heartbeat to %s', conn) conn.nop() logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() continue elif isinstance(res, Error): nonfatal = ( exceptions.FinFailedException, exceptions.ReqFailedException, exceptions.TouchFailedException ) if not isinstance(res.exception(), nonfatal): # If it's not any of the non-fatal exceptions, then # we have to close this connection logger.error( 'Closing %s: %s', conn, res.exception()) self.close_connection(conn) responses.append(res) logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() except exceptions.NSQException: logger.exception('Failed to read from %s', conn) self.close_connection(conn) except socket.error: logger.exception('Failed to read from %s', conn) self.close_connection(conn) # For 
each writable socket, flush some data out for conn in writable: try: conn.flush() except socket.error: logger.exception('Failed to flush %s', conn) self.close_connection(conn) # For each connection with an exception, try to close it and remove it # from our connections for conn in exceptable: self.close_connection(conn) return responses
[ "def", "read", "(", "self", ")", ":", "# We'll check all living connections", "connections", "=", "[", "c", "for", "c", "in", "self", ".", "connections", "(", ")", "if", "c", ".", "alive", "(", ")", "]", "if", "not", "connections", ":", "# If there are no connections, obviously we return no messages, but", "# we should wait the duration of the timeout", "time", ".", "sleep", "(", "self", ".", "_timeout", ")", "return", "[", "]", "# Not all connections need to be written to, so we'll only concern", "# ourselves with those that require writes", "writes", "=", "[", "c", "for", "c", "in", "connections", "if", "c", ".", "pending", "(", ")", "]", "try", ":", "readable", ",", "writable", ",", "exceptable", "=", "select", ".", "select", "(", "connections", ",", "writes", ",", "connections", ",", "self", ".", "_timeout", ")", "except", "exceptions", ".", "ConnectionClosedException", ":", "logger", ".", "exception", "(", "'Tried selecting on closed client'", ")", "return", "[", "]", "except", "select", ".", "error", ":", "logger", ".", "exception", "(", "'Error running select'", ")", "return", "[", "]", "# If we returned because the timeout interval passed, log it and return", "if", "not", "(", "readable", "or", "writable", "or", "exceptable", ")", ":", "logger", ".", "debug", "(", "'Timed out...'", ")", "return", "[", "]", "responses", "=", "[", "]", "# For each readable socket, we'll try to read some responses", "for", "conn", "in", "readable", ":", "try", ":", "for", "res", "in", "conn", ".", "read", "(", ")", ":", "# We'll capture heartbeats and respond to them automatically", "if", "(", "isinstance", "(", "res", ",", "Response", ")", "and", "res", ".", "data", "==", "HEARTBEAT", ")", ":", "logger", ".", "info", "(", "'Sending heartbeat to %s'", ",", "conn", ")", "conn", ".", "nop", "(", ")", "logger", ".", "debug", "(", "'Setting last_recv_timestamp'", ")", "self", ".", "last_recv_timestamp", "=", "time", ".", "time", "(", ")", "continue", "elif", "isinstance", "(", 
"res", ",", "Error", ")", ":", "nonfatal", "=", "(", "exceptions", ".", "FinFailedException", ",", "exceptions", ".", "ReqFailedException", ",", "exceptions", ".", "TouchFailedException", ")", "if", "not", "isinstance", "(", "res", ".", "exception", "(", ")", ",", "nonfatal", ")", ":", "# If it's not any of the non-fatal exceptions, then", "# we have to close this connection", "logger", ".", "error", "(", "'Closing %s: %s'", ",", "conn", ",", "res", ".", "exception", "(", ")", ")", "self", ".", "close_connection", "(", "conn", ")", "responses", ".", "append", "(", "res", ")", "logger", ".", "debug", "(", "'Setting last_recv_timestamp'", ")", "self", ".", "last_recv_timestamp", "=", "time", ".", "time", "(", ")", "except", "exceptions", ".", "NSQException", ":", "logger", ".", "exception", "(", "'Failed to read from %s'", ",", "conn", ")", "self", ".", "close_connection", "(", "conn", ")", "except", "socket", ".", "error", ":", "logger", ".", "exception", "(", "'Failed to read from %s'", ",", "conn", ")", "self", ".", "close_connection", "(", "conn", ")", "# For each writable socket, flush some data out", "for", "conn", "in", "writable", ":", "try", ":", "conn", ".", "flush", "(", ")", "except", "socket", ".", "error", ":", "logger", ".", "exception", "(", "'Failed to flush %s'", ",", "conn", ")", "self", ".", "close_connection", "(", "conn", ")", "# For each connection with an exception, try to close it and remove it", "# from our connections", "for", "conn", "in", "exceptable", ":", "self", ".", "close_connection", "(", "conn", ")", "return", "responses" ]
Read from any of the connections that need it
[ "Read", "from", "any", "of", "the", "connections", "that", "need", "it" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L194-L270
dlecocq/nsq-py
nsq/client.py
Client.random_connection
def random_connection(self): '''Pick a random living connection''' # While at the moment there's no need for this to be a context manager # per se, I would like to use that interface since I anticipate # adding some wrapping around it at some point. yield random.choice( [conn for conn in self.connections() if conn.alive()])
python
def random_connection(self): '''Pick a random living connection''' # While at the moment there's no need for this to be a context manager # per se, I would like to use that interface since I anticipate # adding some wrapping around it at some point. yield random.choice( [conn for conn in self.connections() if conn.alive()])
[ "def", "random_connection", "(", "self", ")", ":", "# While at the moment there's no need for this to be a context manager", "# per se, I would like to use that interface since I anticipate", "# adding some wrapping around it at some point.", "yield", "random", ".", "choice", "(", "[", "conn", "for", "conn", "in", "self", ".", "connections", "(", ")", "if", "conn", ".", "alive", "(", ")", "]", ")" ]
Pick a random living connection
[ "Pick", "a", "random", "living", "connection" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L273-L279
dlecocq/nsq-py
nsq/client.py
Client.wait_response
def wait_response(self): '''Wait for a response''' responses = self.read() while not responses: responses = self.read() return responses
python
def wait_response(self): '''Wait for a response''' responses = self.read() while not responses: responses = self.read() return responses
[ "def", "wait_response", "(", "self", ")", ":", "responses", "=", "self", ".", "read", "(", ")", "while", "not", "responses", ":", "responses", "=", "self", ".", "read", "(", ")", "return", "responses" ]
Wait for a response
[ "Wait", "for", "a", "response" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L281-L286
dlecocq/nsq-py
nsq/client.py
Client.pub
def pub(self, topic, message): '''Publish the provided message to the provided topic''' with self.random_connection() as client: client.pub(topic, message) return self.wait_response()
python
def pub(self, topic, message): '''Publish the provided message to the provided topic''' with self.random_connection() as client: client.pub(topic, message) return self.wait_response()
[ "def", "pub", "(", "self", ",", "topic", ",", "message", ")", ":", "with", "self", ".", "random_connection", "(", ")", "as", "client", ":", "client", ".", "pub", "(", "topic", ",", "message", ")", "return", "self", ".", "wait_response", "(", ")" ]
Publish the provided message to the provided topic
[ "Publish", "the", "provided", "message", "to", "the", "provided", "topic" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L293-L297
dlecocq/nsq-py
nsq/client.py
Client.mpub
def mpub(self, topic, *messages): '''Publish messages to a topic''' with self.random_connection() as client: client.mpub(topic, *messages) return self.wait_response()
python
def mpub(self, topic, *messages): '''Publish messages to a topic''' with self.random_connection() as client: client.mpub(topic, *messages) return self.wait_response()
[ "def", "mpub", "(", "self", ",", "topic", ",", "*", "messages", ")", ":", "with", "self", ".", "random_connection", "(", ")", "as", "client", ":", "client", ".", "mpub", "(", "topic", ",", "*", "messages", ")", "return", "self", ".", "wait_response", "(", ")" ]
Publish messages to a topic
[ "Publish", "messages", "to", "a", "topic" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L299-L303
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.create_socket
def create_socket(self): """Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket. """ socket_path = os.path.join(self.config_dir, 'pueue.sock') # Create Socket and exit with 1, if socket can't be created try: if os.path.exists(socket_path): os.remove(socket_path) self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.bind(socket_path) self.socket.setblocking(0) self.socket.listen(0) # Set file permissions os.chmod(socket_path, stat.S_IRWXU) except Exception: self.logger.error("Daemon couldn't socket. Aborting") self.logger.exception() sys.exit(1) return self.socket
python
def create_socket(self): """Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket. """ socket_path = os.path.join(self.config_dir, 'pueue.sock') # Create Socket and exit with 1, if socket can't be created try: if os.path.exists(socket_path): os.remove(socket_path) self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.bind(socket_path) self.socket.setblocking(0) self.socket.listen(0) # Set file permissions os.chmod(socket_path, stat.S_IRWXU) except Exception: self.logger.error("Daemon couldn't socket. Aborting") self.logger.exception() sys.exit(1) return self.socket
[ "def", "create_socket", "(", "self", ")", ":", "socket_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue.sock'", ")", "# Create Socket and exit with 1, if socket can't be created", "try", ":", "if", "os", ".", "path", ".", "exists", "(", "socket_path", ")", ":", "os", ".", "remove", "(", "socket_path", ")", "self", ".", "socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_STREAM", ")", "self", ".", "socket", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "self", ".", "socket", ".", "bind", "(", "socket_path", ")", "self", ".", "socket", ".", "setblocking", "(", "0", ")", "self", ".", "socket", ".", "listen", "(", "0", ")", "# Set file permissions", "os", ".", "chmod", "(", "socket_path", ",", "stat", ".", "S_IRWXU", ")", "except", "Exception", ":", "self", ".", "logger", ".", "error", "(", "\"Daemon couldn't socket. Aborting\"", ")", "self", ".", "logger", ".", "exception", "(", ")", "sys", ".", "exit", "(", "1", ")", "return", "self", ".", "socket" ]
Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket.
[ "Create", "a", "socket", "for", "the", "daemon", "depending", "on", "the", "directory", "location", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L82-L109
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.initialize_directories
def initialize_directories(self, root_dir): """Create all directories needed for logs and configs.""" if not root_dir: root_dir = os.path.expanduser('~') # Create config directory, if it doesn't exist self.config_dir = os.path.join(root_dir, '.config/pueue') if not os.path.exists(self.config_dir): os.makedirs(self.config_dir)
python
def initialize_directories(self, root_dir): """Create all directories needed for logs and configs.""" if not root_dir: root_dir = os.path.expanduser('~') # Create config directory, if it doesn't exist self.config_dir = os.path.join(root_dir, '.config/pueue') if not os.path.exists(self.config_dir): os.makedirs(self.config_dir)
[ "def", "initialize_directories", "(", "self", ",", "root_dir", ")", ":", "if", "not", "root_dir", ":", "root_dir", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "# Create config directory, if it doesn't exist", "self", ".", "config_dir", "=", "os", ".", "path", ".", "join", "(", "root_dir", ",", "'.config/pueue'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "config_dir", ")", ":", "os", ".", "makedirs", "(", "self", ".", "config_dir", ")" ]
Create all directories needed for logs and configs.
[ "Create", "all", "directories", "needed", "for", "logs", "and", "configs", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L111-L119
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.respond_client
def respond_client(self, answer, socket): """Send an answer to the client.""" response = pickle.dumps(answer, -1) socket.sendall(response) self.read_list.remove(socket) socket.close()
python
def respond_client(self, answer, socket): """Send an answer to the client.""" response = pickle.dumps(answer, -1) socket.sendall(response) self.read_list.remove(socket) socket.close()
[ "def", "respond_client", "(", "self", ",", "answer", ",", "socket", ")", ":", "response", "=", "pickle", ".", "dumps", "(", "answer", ",", "-", "1", ")", "socket", ".", "sendall", "(", "response", ")", "self", ".", "read_list", ".", "remove", "(", "socket", ")", "socket", ".", "close", "(", ")" ]
Send an answer to the client.
[ "Send", "an", "answer", "to", "the", "client", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L121-L126
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.read_config
def read_config(self): """Read a previous configuration file or create a new with default values.""" config_file = os.path.join(self.config_dir, 'pueue.ini') self.config = configparser.ConfigParser() # Try to get configuration file and return it # If this doesn't work, a new default config file will be created if os.path.exists(config_file): try: self.config.read(config_file) return except Exception: self.logger.error('Error while parsing config file. Deleting old config') self.logger.exception() self.config['default'] = { 'resumeAfterStart': False, 'maxProcesses': 1, 'customShell': 'default', } self.config['log'] = { 'logTime': 60*60*24*14, } self.write_config()
python
def read_config(self): """Read a previous configuration file or create a new with default values.""" config_file = os.path.join(self.config_dir, 'pueue.ini') self.config = configparser.ConfigParser() # Try to get configuration file and return it # If this doesn't work, a new default config file will be created if os.path.exists(config_file): try: self.config.read(config_file) return except Exception: self.logger.error('Error while parsing config file. Deleting old config') self.logger.exception() self.config['default'] = { 'resumeAfterStart': False, 'maxProcesses': 1, 'customShell': 'default', } self.config['log'] = { 'logTime': 60*60*24*14, } self.write_config()
[ "def", "read_config", "(", "self", ")", ":", "config_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue.ini'", ")", "self", ".", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "# Try to get configuration file and return it", "# If this doesn't work, a new default config file will be created", "if", "os", ".", "path", ".", "exists", "(", "config_file", ")", ":", "try", ":", "self", ".", "config", ".", "read", "(", "config_file", ")", "return", "except", "Exception", ":", "self", ".", "logger", ".", "error", "(", "'Error while parsing config file. Deleting old config'", ")", "self", ".", "logger", ".", "exception", "(", ")", "self", ".", "config", "[", "'default'", "]", "=", "{", "'resumeAfterStart'", ":", "False", ",", "'maxProcesses'", ":", "1", ",", "'customShell'", ":", "'default'", ",", "}", "self", ".", "config", "[", "'log'", "]", "=", "{", "'logTime'", ":", "60", "*", "60", "*", "24", "*", "14", ",", "}", "self", ".", "write_config", "(", ")" ]
Read a previous configuration file or create a new with default values.
[ "Read", "a", "previous", "configuration", "file", "or", "create", "a", "new", "with", "default", "values", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L128-L150
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.write_config
def write_config(self): """Write the current configuration to the config file.""" config_file = os.path.join(self.config_dir, 'pueue.ini') with open(config_file, 'w') as file_descriptor: self.config.write(file_descriptor)
python
def write_config(self): """Write the current configuration to the config file.""" config_file = os.path.join(self.config_dir, 'pueue.ini') with open(config_file, 'w') as file_descriptor: self.config.write(file_descriptor)
[ "def", "write_config", "(", "self", ")", ":", "config_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue.ini'", ")", "with", "open", "(", "config_file", ",", "'w'", ")", "as", "file_descriptor", ":", "self", ".", "config", ".", "write", "(", "file_descriptor", ")" ]
Write the current configuration to the config file.
[ "Write", "the", "current", "configuration", "to", "the", "config", "file", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L152-L156
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.main
def main(self): """The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit """ try: while self.running: # Trigger the processing of finished processes by the ProcessHandler. # If there are finished processes we write the log to keep it up to date. if self.process_handler.check_finished(): self.logger.write(self.queue) if self.reset and self.process_handler.all_finished(): # Rotate log and reset queue self.logger.rotate(self.queue) self.queue.reset() self.reset = False # Check if the ProcessHandler has any free slots to spawn a new process if not self.paused and not self.reset and self.running: self.process_handler.check_for_new() # This is the communication section of the daemon. # 1. Receive message from the client # 2. Check payload and call respective function with payload as parameter. # 3. Execute logic # 4. Return payload with response to client # Create list for waitable objects readable, writable, failed = select.select(self.read_list, [], [], 1) for waiting_socket in readable: if waiting_socket is self.socket: # Listening for clients to connect. # Client sockets are added to readlist to be processed. try: client_socket, client_address = self.socket.accept() self.read_list.append(client_socket) except Exception: self.logger.warning('Daemon rejected client') else: # Trying to receive instruction from client socket try: instruction = waiting_socket.recv(1048576) except (EOFError, OSError): self.logger.warning('Client died while sending message, dropping received data.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # Check for valid instruction if instruction is not None: # Check if received data can be unpickled. 
try: payload = pickle.loads(instruction) except EOFError: # Instruction is ignored if it can't be unpickled self.logger.error('Received message is incomplete, dropping received data.') self.read_list.remove(waiting_socket) waiting_socket.close() # Set invalid payload payload = {'mode': ''} functions = { 'add': self.add, 'remove': self.remove, 'edit': self.edit_command, 'switch': self.switch, 'send': self.pipe_to_process, 'status': self.send_status, 'start': self.start, 'pause': self.pause, 'stash': self.stash, 'enqueue': self.enqueue, 'restart': self.restart, 'kill': self.kill_process, 'reset': self.reset_everything, 'clear': self.clear, 'config': self.set_config, 'STOPDAEMON': self.stop_daemon, } if payload['mode'] in functions.keys(): self.logger.debug('Payload received:') self.logger.debug(payload) response = functions[payload['mode']](payload) self.logger.debug('Sending payload:') self.logger.debug(response) try: self.respond_client(response, waiting_socket) except (BrokenPipeError): self.logger.warning('Client disconnected during message dispatching. Function successfully executed anyway.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None else: self.respond_client({'message': 'Unknown Command', 'status': 'error'}, waiting_socket) except Exception: self.logger.exception() # Wait for killed or stopped processes to finish (cleanup) self.process_handler.wait_for_finish() # Close socket, clean everything up and exit self.socket.close() cleanup(self.config_dir) sys.exit(0)
python
def main(self): """The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit """ try: while self.running: # Trigger the processing of finished processes by the ProcessHandler. # If there are finished processes we write the log to keep it up to date. if self.process_handler.check_finished(): self.logger.write(self.queue) if self.reset and self.process_handler.all_finished(): # Rotate log and reset queue self.logger.rotate(self.queue) self.queue.reset() self.reset = False # Check if the ProcessHandler has any free slots to spawn a new process if not self.paused and not self.reset and self.running: self.process_handler.check_for_new() # This is the communication section of the daemon. # 1. Receive message from the client # 2. Check payload and call respective function with payload as parameter. # 3. Execute logic # 4. Return payload with response to client # Create list for waitable objects readable, writable, failed = select.select(self.read_list, [], [], 1) for waiting_socket in readable: if waiting_socket is self.socket: # Listening for clients to connect. # Client sockets are added to readlist to be processed. try: client_socket, client_address = self.socket.accept() self.read_list.append(client_socket) except Exception: self.logger.warning('Daemon rejected client') else: # Trying to receive instruction from client socket try: instruction = waiting_socket.recv(1048576) except (EOFError, OSError): self.logger.warning('Client died while sending message, dropping received data.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # Check for valid instruction if instruction is not None: # Check if received data can be unpickled. 
try: payload = pickle.loads(instruction) except EOFError: # Instruction is ignored if it can't be unpickled self.logger.error('Received message is incomplete, dropping received data.') self.read_list.remove(waiting_socket) waiting_socket.close() # Set invalid payload payload = {'mode': ''} functions = { 'add': self.add, 'remove': self.remove, 'edit': self.edit_command, 'switch': self.switch, 'send': self.pipe_to_process, 'status': self.send_status, 'start': self.start, 'pause': self.pause, 'stash': self.stash, 'enqueue': self.enqueue, 'restart': self.restart, 'kill': self.kill_process, 'reset': self.reset_everything, 'clear': self.clear, 'config': self.set_config, 'STOPDAEMON': self.stop_daemon, } if payload['mode'] in functions.keys(): self.logger.debug('Payload received:') self.logger.debug(payload) response = functions[payload['mode']](payload) self.logger.debug('Sending payload:') self.logger.debug(response) try: self.respond_client(response, waiting_socket) except (BrokenPipeError): self.logger.warning('Client disconnected during message dispatching. Function successfully executed anyway.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None else: self.respond_client({'message': 'Unknown Command', 'status': 'error'}, waiting_socket) except Exception: self.logger.exception() # Wait for killed or stopped processes to finish (cleanup) self.process_handler.wait_for_finish() # Close socket, clean everything up and exit self.socket.close() cleanup(self.config_dir) sys.exit(0)
[ "def", "main", "(", "self", ")", ":", "try", ":", "while", "self", ".", "running", ":", "# Trigger the processing of finished processes by the ProcessHandler.", "# If there are finished processes we write the log to keep it up to date.", "if", "self", ".", "process_handler", ".", "check_finished", "(", ")", ":", "self", ".", "logger", ".", "write", "(", "self", ".", "queue", ")", "if", "self", ".", "reset", "and", "self", ".", "process_handler", ".", "all_finished", "(", ")", ":", "# Rotate log and reset queue", "self", ".", "logger", ".", "rotate", "(", "self", ".", "queue", ")", "self", ".", "queue", ".", "reset", "(", ")", "self", ".", "reset", "=", "False", "# Check if the ProcessHandler has any free slots to spawn a new process", "if", "not", "self", ".", "paused", "and", "not", "self", ".", "reset", "and", "self", ".", "running", ":", "self", ".", "process_handler", ".", "check_for_new", "(", ")", "# This is the communication section of the daemon.", "# 1. Receive message from the client", "# 2. Check payload and call respective function with payload as parameter.", "# 3. Execute logic", "# 4. 
Return payload with response to client", "# Create list for waitable objects", "readable", ",", "writable", ",", "failed", "=", "select", ".", "select", "(", "self", ".", "read_list", ",", "[", "]", ",", "[", "]", ",", "1", ")", "for", "waiting_socket", "in", "readable", ":", "if", "waiting_socket", "is", "self", ".", "socket", ":", "# Listening for clients to connect.", "# Client sockets are added to readlist to be processed.", "try", ":", "client_socket", ",", "client_address", "=", "self", ".", "socket", ".", "accept", "(", ")", "self", ".", "read_list", ".", "append", "(", "client_socket", ")", "except", "Exception", ":", "self", ".", "logger", ".", "warning", "(", "'Daemon rejected client'", ")", "else", ":", "# Trying to receive instruction from client socket", "try", ":", "instruction", "=", "waiting_socket", ".", "recv", "(", "1048576", ")", "except", "(", "EOFError", ",", "OSError", ")", ":", "self", ".", "logger", ".", "warning", "(", "'Client died while sending message, dropping received data.'", ")", "# Remove client socket", "self", ".", "read_list", ".", "remove", "(", "waiting_socket", ")", "waiting_socket", ".", "close", "(", ")", "instruction", "=", "None", "# Check for valid instruction", "if", "instruction", "is", "not", "None", ":", "# Check if received data can be unpickled.", "try", ":", "payload", "=", "pickle", ".", "loads", "(", "instruction", ")", "except", "EOFError", ":", "# Instruction is ignored if it can't be unpickled", "self", ".", "logger", ".", "error", "(", "'Received message is incomplete, dropping received data.'", ")", "self", ".", "read_list", ".", "remove", "(", "waiting_socket", ")", "waiting_socket", ".", "close", "(", ")", "# Set invalid payload", "payload", "=", "{", "'mode'", ":", "''", "}", "functions", "=", "{", "'add'", ":", "self", ".", "add", ",", "'remove'", ":", "self", ".", "remove", ",", "'edit'", ":", "self", ".", "edit_command", ",", "'switch'", ":", "self", ".", "switch", ",", "'send'", ":", "self", ".", 
"pipe_to_process", ",", "'status'", ":", "self", ".", "send_status", ",", "'start'", ":", "self", ".", "start", ",", "'pause'", ":", "self", ".", "pause", ",", "'stash'", ":", "self", ".", "stash", ",", "'enqueue'", ":", "self", ".", "enqueue", ",", "'restart'", ":", "self", ".", "restart", ",", "'kill'", ":", "self", ".", "kill_process", ",", "'reset'", ":", "self", ".", "reset_everything", ",", "'clear'", ":", "self", ".", "clear", ",", "'config'", ":", "self", ".", "set_config", ",", "'STOPDAEMON'", ":", "self", ".", "stop_daemon", ",", "}", "if", "payload", "[", "'mode'", "]", "in", "functions", ".", "keys", "(", ")", ":", "self", ".", "logger", ".", "debug", "(", "'Payload received:'", ")", "self", ".", "logger", ".", "debug", "(", "payload", ")", "response", "=", "functions", "[", "payload", "[", "'mode'", "]", "]", "(", "payload", ")", "self", ".", "logger", ".", "debug", "(", "'Sending payload:'", ")", "self", ".", "logger", ".", "debug", "(", "response", ")", "try", ":", "self", ".", "respond_client", "(", "response", ",", "waiting_socket", ")", "except", "(", "BrokenPipeError", ")", ":", "self", ".", "logger", ".", "warning", "(", "'Client disconnected during message dispatching. Function successfully executed anyway.'", ")", "# Remove client socket", "self", ".", "read_list", ".", "remove", "(", "waiting_socket", ")", "waiting_socket", ".", "close", "(", ")", "instruction", "=", "None", "else", ":", "self", ".", "respond_client", "(", "{", "'message'", ":", "'Unknown Command'", ",", "'status'", ":", "'error'", "}", ",", "waiting_socket", ")", "except", "Exception", ":", "self", ".", "logger", ".", "exception", "(", ")", "# Wait for killed or stopped processes to finish (cleanup)", "self", ".", "process_handler", ".", "wait_for_finish", "(", ")", "# Close socket, clean everything up and exit", "self", ".", "socket", ".", "close", "(", ")", "cleanup", "(", "self", ".", "config_dir", ")", "sys", ".", "exit", "(", "0", ")" ]
The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit
[ "The", "main", "function", "containing", "the", "loop", "for", "communication", "and", "process", "management", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L158-L273
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.stop_daemon
def stop_daemon(self, payload=None): """Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes. """ kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.running = False return {'message': 'Pueue daemon shutting down', 'status': 'success'}
python
def stop_daemon(self, payload=None): """Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes. """ kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.running = False return {'message': 'Pueue daemon shutting down', 'status': 'success'}
[ "def", "stop_daemon", "(", "self", ",", "payload", "=", "None", ")", ":", "kill_signal", "=", "signals", "[", "'9'", "]", "self", ".", "process_handler", ".", "kill_all", "(", "kill_signal", ",", "True", ")", "self", ".", "running", "=", "False", "return", "{", "'message'", ":", "'Pueue daemon shutting down'", ",", "'status'", ":", "'success'", "}" ]
Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes.
[ "Kill", "current", "processes", "and", "initiate", "daemon", "shutdown", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L275-L285
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.set_config
def set_config(self, payload): """Update the current config depending on the payload and save it.""" self.config['default'][payload['option']] = str(payload['value']) if payload['option'] == 'maxProcesses': self.process_handler.set_max(payload['value']) if payload['option'] == 'customShell': path = payload['value'] if os.path.isfile(path) and os.access(path, os.X_OK): self.process_handler.set_shell(path) elif path == 'default': self.process_handler.set_shell() else: return {'message': "File in path doesn't exist or is not executable.", 'status': 'error'} self.write_config() return {'message': 'Configuration successfully updated.', 'status': 'success'}
python
def set_config(self, payload): """Update the current config depending on the payload and save it.""" self.config['default'][payload['option']] = str(payload['value']) if payload['option'] == 'maxProcesses': self.process_handler.set_max(payload['value']) if payload['option'] == 'customShell': path = payload['value'] if os.path.isfile(path) and os.access(path, os.X_OK): self.process_handler.set_shell(path) elif path == 'default': self.process_handler.set_shell() else: return {'message': "File in path doesn't exist or is not executable.", 'status': 'error'} self.write_config() return {'message': 'Configuration successfully updated.', 'status': 'success'}
[ "def", "set_config", "(", "self", ",", "payload", ")", ":", "self", ".", "config", "[", "'default'", "]", "[", "payload", "[", "'option'", "]", "]", "=", "str", "(", "payload", "[", "'value'", "]", ")", "if", "payload", "[", "'option'", "]", "==", "'maxProcesses'", ":", "self", ".", "process_handler", ".", "set_max", "(", "payload", "[", "'value'", "]", ")", "if", "payload", "[", "'option'", "]", "==", "'customShell'", ":", "path", "=", "payload", "[", "'value'", "]", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", "and", "os", ".", "access", "(", "path", ",", "os", ".", "X_OK", ")", ":", "self", ".", "process_handler", ".", "set_shell", "(", "path", ")", "elif", "path", "==", "'default'", ":", "self", ".", "process_handler", ".", "set_shell", "(", ")", "else", ":", "return", "{", "'message'", ":", "\"File in path doesn't exist or is not executable.\"", ",", "'status'", ":", "'error'", "}", "self", ".", "write_config", "(", ")", "return", "{", "'message'", ":", "'Configuration successfully updated.'", ",", "'status'", ":", "'success'", "}" ]
Update the current config depending on the payload and save it.
[ "Update", "the", "current", "config", "depending", "on", "the", "payload", "and", "save", "it", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L287-L306
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.pipe_to_process
def pipe_to_process(self, payload): """Send something to stdin of a specific process.""" message = payload['input'] key = payload['key'] if not self.process_handler.is_running(key): return {'message': 'No running process for this key', 'status': 'error'} self.process_handler.send_to_process(message, key) return {'message': 'Message sent', 'status': 'success'}
python
def pipe_to_process(self, payload): """Send something to stdin of a specific process.""" message = payload['input'] key = payload['key'] if not self.process_handler.is_running(key): return {'message': 'No running process for this key', 'status': 'error'} self.process_handler.send_to_process(message, key) return {'message': 'Message sent', 'status': 'success'}
[ "def", "pipe_to_process", "(", "self", ",", "payload", ")", ":", "message", "=", "payload", "[", "'input'", "]", "key", "=", "payload", "[", "'key'", "]", "if", "not", "self", ".", "process_handler", ".", "is_running", "(", "key", ")", ":", "return", "{", "'message'", ":", "'No running process for this key'", ",", "'status'", ":", "'error'", "}", "self", ".", "process_handler", ".", "send_to_process", "(", "message", ",", "key", ")", "return", "{", "'message'", ":", "'Message sent'", ",", "'status'", ":", "'success'", "}" ]
Send something to stdin of a specific process.
[ "Send", "something", "to", "stdin", "of", "a", "specific", "process", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L308-L317
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.send_status
def send_status(self, payload): """Send the daemon status and the current queue for displaying.""" answer = {} data = [] # Get daemon status if self.paused: answer['status'] = 'paused' else: answer['status'] = 'running' # Add current queue or a message, that queue is empty if len(self.queue) > 0: data = deepcopy(self.queue.queue) # Remove stderr and stdout output for transfer # Some outputs are way to big for the socket buffer # and this is not needed by the client for key, item in data.items(): if 'stderr' in item: del item['stderr'] if 'stdout' in item: del item['stdout'] else: data = 'Queue is empty' answer['data'] = data return answer
python
def send_status(self, payload): """Send the daemon status and the current queue for displaying.""" answer = {} data = [] # Get daemon status if self.paused: answer['status'] = 'paused' else: answer['status'] = 'running' # Add current queue or a message, that queue is empty if len(self.queue) > 0: data = deepcopy(self.queue.queue) # Remove stderr and stdout output for transfer # Some outputs are way to big for the socket buffer # and this is not needed by the client for key, item in data.items(): if 'stderr' in item: del item['stderr'] if 'stdout' in item: del item['stdout'] else: data = 'Queue is empty' answer['data'] = data return answer
[ "def", "send_status", "(", "self", ",", "payload", ")", ":", "answer", "=", "{", "}", "data", "=", "[", "]", "# Get daemon status", "if", "self", ".", "paused", ":", "answer", "[", "'status'", "]", "=", "'paused'", "else", ":", "answer", "[", "'status'", "]", "=", "'running'", "# Add current queue or a message, that queue is empty", "if", "len", "(", "self", ".", "queue", ")", ">", "0", ":", "data", "=", "deepcopy", "(", "self", ".", "queue", ".", "queue", ")", "# Remove stderr and stdout output for transfer", "# Some outputs are way to big for the socket buffer", "# and this is not needed by the client", "for", "key", ",", "item", "in", "data", ".", "items", "(", ")", ":", "if", "'stderr'", "in", "item", ":", "del", "item", "[", "'stderr'", "]", "if", "'stdout'", "in", "item", ":", "del", "item", "[", "'stdout'", "]", "else", ":", "data", "=", "'Queue is empty'", "answer", "[", "'data'", "]", "=", "data", "return", "answer" ]
Send the daemon status and the current queue for displaying.
[ "Send", "the", "daemon", "status", "and", "the", "current", "queue", "for", "displaying", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L319-L344
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.reset_everything
def reset_everything(self, payload): """Kill all processes, delete the queue and clean everything up.""" kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.process_handler.wait_for_finish() self.reset = True answer = {'message': 'Resetting current queue', 'status': 'success'} return answer
python
def reset_everything(self, payload): """Kill all processes, delete the queue and clean everything up.""" kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.process_handler.wait_for_finish() self.reset = True answer = {'message': 'Resetting current queue', 'status': 'success'} return answer
[ "def", "reset_everything", "(", "self", ",", "payload", ")", ":", "kill_signal", "=", "signals", "[", "'9'", "]", "self", ".", "process_handler", ".", "kill_all", "(", "kill_signal", ",", "True", ")", "self", ".", "process_handler", ".", "wait_for_finish", "(", ")", "self", ".", "reset", "=", "True", "answer", "=", "{", "'message'", ":", "'Resetting current queue'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Kill all processes, delete the queue and clean everything up.
[ "Kill", "all", "processes", "delete", "the", "queue", "and", "clean", "everything", "up", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L346-L354
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.clear
def clear(self, payload): """Clear queue from any `done` or `failed` entries. The log will be rotated once. Otherwise we would loose all logs from thoes finished processes. """ self.logger.rotate(self.queue) self.queue.clear() self.logger.write(self.queue) answer = {'message': 'Finished entries have been removed.', 'status': 'success'} return answer
python
def clear(self, payload): """Clear queue from any `done` or `failed` entries. The log will be rotated once. Otherwise we would loose all logs from thoes finished processes. """ self.logger.rotate(self.queue) self.queue.clear() self.logger.write(self.queue) answer = {'message': 'Finished entries have been removed.', 'status': 'success'} return answer
[ "def", "clear", "(", "self", ",", "payload", ")", ":", "self", ".", "logger", ".", "rotate", "(", "self", ".", "queue", ")", "self", ".", "queue", ".", "clear", "(", ")", "self", ".", "logger", ".", "write", "(", "self", ".", "queue", ")", "answer", "=", "{", "'message'", ":", "'Finished entries have been removed.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Clear queue from any `done` or `failed` entries. The log will be rotated once. Otherwise we would loose all logs from thoes finished processes.
[ "Clear", "queue", "from", "any", "done", "or", "failed", "entries", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L356-L367
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.start
def start(self, payload): """Start the daemon and all processes or only specific processes.""" # Start specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.start_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Started processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo paused, queued or stashed process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Start a all processes and the daemon else: self.process_handler.start_all() if self.paused: self.paused = False answer = {'message': 'Daemon and all processes started.', 'status': 'success'} else: answer = {'message': 'Daemon already running, starting all processes.', 'status': 'success'} return answer
python
def start(self, payload): """Start the daemon and all processes or only specific processes.""" # Start specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.start_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Started processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo paused, queued or stashed process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Start a all processes and the daemon else: self.process_handler.start_all() if self.paused: self.paused = False answer = {'message': 'Daemon and all processes started.', 'status': 'success'} else: answer = {'message': 'Daemon already running, starting all processes.', 'status': 'success'} return answer
[ "def", "start", "(", "self", ",", "payload", ")", ":", "# Start specific processes, if `keys` is given in the payload", "if", "payload", ".", "get", "(", "'keys'", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", ".", "get", "(", "'keys'", ")", ":", "success", "=", "self", ".", "process_handler", ".", "start_process", "(", "key", ")", "if", "success", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Started processes: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo paused, queued or stashed process for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "# Start a all processes and the daemon", "else", ":", "self", ".", "process_handler", ".", "start_all", "(", ")", "if", "self", ".", "paused", ":", "self", ".", "paused", "=", "False", "answer", "=", "{", "'message'", ":", "'Daemon and all processes started.'", ",", "'status'", ":", "'success'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "'Daemon already running, starting all processes.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Start the daemon and all processes or only specific processes.
[ "Start", "the", "daemon", "and", "all", "processes", "or", "only", "specific", "processes", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L369-L402
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.pause
def pause(self, payload): """Start the daemon and all processes or only specific processes.""" # Pause specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.pause_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Paused processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Pause all processes and the daemon else: if payload.get('wait'): self.paused = True answer = {'message': 'Pausing daemon, but waiting for processes to finish.', 'status': 'success'} else: self.process_handler.pause_all() if not self.paused: self.paused = True answer = {'message': 'Daemon and all processes paused.', 'status': 'success'} else: answer = {'message': 'Daemon already paused, pausing all processes anyway.', 'status': 'success'} return answer
python
def pause(self, payload): """Start the daemon and all processes or only specific processes.""" # Pause specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.pause_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Paused processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Pause all processes and the daemon else: if payload.get('wait'): self.paused = True answer = {'message': 'Pausing daemon, but waiting for processes to finish.', 'status': 'success'} else: self.process_handler.pause_all() if not self.paused: self.paused = True answer = {'message': 'Daemon and all processes paused.', 'status': 'success'} else: answer = {'message': 'Daemon already paused, pausing all processes anyway.', 'status': 'success'} return answer
[ "def", "pause", "(", "self", ",", "payload", ")", ":", "# Pause specific processes, if `keys` is given in the payload", "if", "payload", ".", "get", "(", "'keys'", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", ".", "get", "(", "'keys'", ")", ":", "success", "=", "self", ".", "process_handler", ".", "pause_process", "(", "key", ")", "if", "success", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Paused processes: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo running process for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "# Pause all processes and the daemon", "else", ":", "if", "payload", ".", "get", "(", "'wait'", ")", ":", "self", ".", "paused", "=", "True", "answer", "=", "{", "'message'", ":", "'Pausing daemon, but waiting for processes to finish.'", ",", "'status'", ":", "'success'", "}", "else", ":", "self", ".", "process_handler", ".", "pause_all", "(", ")", "if", "not", "self", ".", "paused", ":", "self", ".", "paused", "=", "True", "answer", "=", "{", "'message'", ":", "'Daemon and all processes paused.'", ",", "'status'", ":", "'success'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "'Daemon already paused, pausing all processes anyway.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Start the daemon and all processes or only specific processes.
[ "Start", "the", "daemon", "and", "all", "processes", "or", "only", "specific", "processes", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L404-L443
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.edit_command
def edit_command(self, payload): """Edit the command of a specific entry.""" key = payload['key'] command = payload['command'] if self.queue[key]: if self.queue[key]['status'] in ['queued', 'stashed']: self.queue[key]['command'] = command answer = {'message': 'Command updated', 'status': 'error'} else: answer = {'message': "Entry is not 'queued' or 'stashed'", 'status': 'error'} else: answer = {'message': 'No entry with this key', 'status': 'error'} # Pause all processes and the daemon return answer
python
def edit_command(self, payload): """Edit the command of a specific entry.""" key = payload['key'] command = payload['command'] if self.queue[key]: if self.queue[key]['status'] in ['queued', 'stashed']: self.queue[key]['command'] = command answer = {'message': 'Command updated', 'status': 'error'} else: answer = {'message': "Entry is not 'queued' or 'stashed'", 'status': 'error'} else: answer = {'message': 'No entry with this key', 'status': 'error'} # Pause all processes and the daemon return answer
[ "def", "edit_command", "(", "self", ",", "payload", ")", ":", "key", "=", "payload", "[", "'key'", "]", "command", "=", "payload", "[", "'command'", "]", "if", "self", ".", "queue", "[", "key", "]", ":", "if", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "in", "[", "'queued'", ",", "'stashed'", "]", ":", "self", ".", "queue", "[", "key", "]", "[", "'command'", "]", "=", "command", "answer", "=", "{", "'message'", ":", "'Command updated'", ",", "'status'", ":", "'error'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "\"Entry is not 'queued' or 'stashed'\"", ",", "'status'", ":", "'error'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "'No entry with this key'", ",", "'status'", ":", "'error'", "}", "# Pause all processes and the daemon", "return", "answer" ]
Edit the command of a specific entry.
[ "Edit", "the", "command", "of", "a", "specific", "entry", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L445-L460
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.stash
def stash(self, payload): """Stash the specified processes.""" succeeded = [] failed = [] for key in payload['keys']: if self.queue.get(key) is not None: if self.queue[key]['status'] == 'queued': self.queue[key]['status'] = 'stashed' succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Stashed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo queued entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
python
def stash(self, payload): """Stash the specified processes.""" succeeded = [] failed = [] for key in payload['keys']: if self.queue.get(key) is not None: if self.queue[key]['status'] == 'queued': self.queue[key]['status'] = 'stashed' succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Stashed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo queued entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
[ "def", "stash", "(", "self", ",", "payload", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", "[", "'keys'", "]", ":", "if", "self", ".", "queue", ".", "get", "(", "key", ")", "is", "not", "None", ":", "if", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "==", "'queued'", ":", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'stashed'", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Stashed entries: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo queued entry for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "return", "answer" ]
Stash the specified processes.
[ "Stash", "the", "specified", "processes", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L462-L486
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.kill_process
def kill_process(self, payload): """Pause the daemon and kill all processes or kill a specific process.""" # Kill specific processes, if `keys` is given in the payload kill_signal = signals[payload['signal'].lower()] kill_shell = payload.get('all', False) if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.kill_process(key, kill_signal, kill_shell) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += "Signal '{}' sent to processes: {}.".format(payload['signal'], ', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Kill all processes and the daemon else: self.process_handler.kill_all(kill_signal, kill_shell) if kill_signal == signal.SIGINT or \ kill_signal == signal.SIGTERM or \ kill_signal == signal.SIGKILL: self.paused = True answer = {'message': 'Signal send to all processes.', 'status': 'success'} return answer
python
def kill_process(self, payload): """Pause the daemon and kill all processes or kill a specific process.""" # Kill specific processes, if `keys` is given in the payload kill_signal = signals[payload['signal'].lower()] kill_shell = payload.get('all', False) if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.kill_process(key, kill_signal, kill_shell) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += "Signal '{}' sent to processes: {}.".format(payload['signal'], ', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Kill all processes and the daemon else: self.process_handler.kill_all(kill_signal, kill_shell) if kill_signal == signal.SIGINT or \ kill_signal == signal.SIGTERM or \ kill_signal == signal.SIGKILL: self.paused = True answer = {'message': 'Signal send to all processes.', 'status': 'success'} return answer
[ "def", "kill_process", "(", "self", ",", "payload", ")", ":", "# Kill specific processes, if `keys` is given in the payload", "kill_signal", "=", "signals", "[", "payload", "[", "'signal'", "]", ".", "lower", "(", ")", "]", "kill_shell", "=", "payload", ".", "get", "(", "'all'", ",", "False", ")", "if", "payload", ".", "get", "(", "'keys'", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", ".", "get", "(", "'keys'", ")", ":", "success", "=", "self", ".", "process_handler", ".", "kill_process", "(", "key", ",", "kill_signal", ",", "kill_shell", ")", "if", "success", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "\"Signal '{}' sent to processes: {}.\"", ".", "format", "(", "payload", "[", "'signal'", "]", ",", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo running process for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "# Kill all processes and the daemon", "else", ":", "self", ".", "process_handler", ".", "kill_all", "(", "kill_signal", ",", "kill_shell", ")", "if", "kill_signal", "==", "signal", ".", "SIGINT", "or", "kill_signal", "==", "signal", ".", "SIGTERM", "or", "kill_signal", "==", "signal", ".", "SIGKILL", ":", "self", ".", "paused", "=", "True", "answer", "=", "{", "'message'", ":", "'Signal send to all processes.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Pause the daemon and kill all processes or kill a specific process.
[ "Pause", "the", "daemon", "and", "kill", "all", "processes", "or", "kill", "a", "specific", "process", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L514-L548
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.remove
def remove(self, payload): """Remove specified entries from the queue.""" succeeded = [] failed = [] for key in payload['keys']: running = self.process_handler.is_running(key) if not running: removed = self.queue.remove(key) if removed: succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Removed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nRunning or non-existing entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
python
def remove(self, payload): """Remove specified entries from the queue.""" succeeded = [] failed = [] for key in payload['keys']: running = self.process_handler.is_running(key) if not running: removed = self.queue.remove(key) if removed: succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Removed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nRunning or non-existing entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
[ "def", "remove", "(", "self", ",", "payload", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", "[", "'keys'", "]", ":", "running", "=", "self", ".", "process_handler", ".", "is_running", "(", "key", ")", "if", "not", "running", ":", "removed", "=", "self", ".", "queue", ".", "remove", "(", "key", ")", "if", "removed", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Removed entries: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nRunning or non-existing entry for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "return", "answer" ]
Remove specified entries from the queue.
[ "Remove", "specified", "entries", "from", "the", "queue", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L555-L580
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.switch
def switch(self, payload): """Switch the two specified entry positions in the queue.""" first = payload['first'] second = payload['second'] running = self.process_handler.is_running(first) or self.process_handler.is_running(second) if running: answer = { 'message': "Can't switch running processes, " "please stop the processes before switching them.", 'status': 'error' } else: switched = self.queue.switch(first, second) if switched: answer = { 'message': 'Entries #{} and #{} switched'.format(first, second), 'status': 'success' } else: answer = {'message': "One or both entries do not exist or are not queued/stashed.", 'status': 'error'} return answer
python
def switch(self, payload): """Switch the two specified entry positions in the queue.""" first = payload['first'] second = payload['second'] running = self.process_handler.is_running(first) or self.process_handler.is_running(second) if running: answer = { 'message': "Can't switch running processes, " "please stop the processes before switching them.", 'status': 'error' } else: switched = self.queue.switch(first, second) if switched: answer = { 'message': 'Entries #{} and #{} switched'.format(first, second), 'status': 'success' } else: answer = {'message': "One or both entries do not exist or are not queued/stashed.", 'status': 'error'} return answer
[ "def", "switch", "(", "self", ",", "payload", ")", ":", "first", "=", "payload", "[", "'first'", "]", "second", "=", "payload", "[", "'second'", "]", "running", "=", "self", ".", "process_handler", ".", "is_running", "(", "first", ")", "or", "self", ".", "process_handler", ".", "is_running", "(", "second", ")", "if", "running", ":", "answer", "=", "{", "'message'", ":", "\"Can't switch running processes, \"", "\"please stop the processes before switching them.\"", ",", "'status'", ":", "'error'", "}", "else", ":", "switched", "=", "self", ".", "queue", ".", "switch", "(", "first", ",", "second", ")", "if", "switched", ":", "answer", "=", "{", "'message'", ":", "'Entries #{} and #{} switched'", ".", "format", "(", "first", ",", "second", ")", ",", "'status'", ":", "'success'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "\"One or both entries do not exist or are not queued/stashed.\"", ",", "'status'", ":", "'error'", "}", "return", "answer" ]
Switch the two specified entry positions in the queue.
[ "Switch", "the", "two", "specified", "entry", "positions", "in", "the", "queue", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L582-L604
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.restart
def restart(self, payload): """Restart the specified entries.""" succeeded = [] failed = [] for key in payload['keys']: restarted = self.queue.restart(key) if restarted: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Restarted entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo finished entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
python
def restart(self, payload): """Restart the specified entries.""" succeeded = [] failed = [] for key in payload['keys']: restarted = self.queue.restart(key) if restarted: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Restarted entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo finished entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
[ "def", "restart", "(", "self", ",", "payload", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", "[", "'keys'", "]", ":", "restarted", "=", "self", ".", "queue", ".", "restart", "(", "key", ")", "if", "restarted", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Restarted entries: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo finished entry for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "return", "answer" ]
Restart the specified entries.
[ "Restart", "the", "specified", "entries", "." ]
train
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L606-L626
dlecocq/nsq-py
nsq/sockets/base.py
SocketWrapper.sendall
def sendall(self, data, flags=0): '''Same as socket.sendall''' count = len(data) while count: sent = self.send(data, flags) # This could probably be a buffer object data = data[sent:] count -= sent
python
def sendall(self, data, flags=0): '''Same as socket.sendall''' count = len(data) while count: sent = self.send(data, flags) # This could probably be a buffer object data = data[sent:] count -= sent
[ "def", "sendall", "(", "self", ",", "data", ",", "flags", "=", "0", ")", ":", "count", "=", "len", "(", "data", ")", "while", "count", ":", "sent", "=", "self", ".", "send", "(", "data", ",", "flags", ")", "# This could probably be a buffer object", "data", "=", "data", "[", "sent", ":", "]", "count", "-=", "sent" ]
Same as socket.sendall
[ "Same", "as", "socket", ".", "sendall" ]
train
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/sockets/base.py#L31-L38
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_ls
def do_ls(client, args): """List directory""" for item in client.get_folder_contents_iter(args.uri): # privacy flag if item['privacy'] == 'public': item['pf'] = '@' else: item['pf'] = '-' if isinstance(item, Folder): # type flag item['tf'] = 'd' item['key'] = item['folderkey'] item['size'] = '' else: item['tf'] = '-' item['key'] = item['quickkey'] item['name'] = item['filename'] print("{tf}{pf} {key:>15} {size:>10} {created} {name}".format(**item)) return True
python
def do_ls(client, args): """List directory""" for item in client.get_folder_contents_iter(args.uri): # privacy flag if item['privacy'] == 'public': item['pf'] = '@' else: item['pf'] = '-' if isinstance(item, Folder): # type flag item['tf'] = 'd' item['key'] = item['folderkey'] item['size'] = '' else: item['tf'] = '-' item['key'] = item['quickkey'] item['name'] = item['filename'] print("{tf}{pf} {key:>15} {size:>10} {created} {name}".format(**item)) return True
[ "def", "do_ls", "(", "client", ",", "args", ")", ":", "for", "item", "in", "client", ".", "get_folder_contents_iter", "(", "args", ".", "uri", ")", ":", "# privacy flag", "if", "item", "[", "'privacy'", "]", "==", "'public'", ":", "item", "[", "'pf'", "]", "=", "'@'", "else", ":", "item", "[", "'pf'", "]", "=", "'-'", "if", "isinstance", "(", "item", ",", "Folder", ")", ":", "# type flag", "item", "[", "'tf'", "]", "=", "'d'", "item", "[", "'key'", "]", "=", "item", "[", "'folderkey'", "]", "item", "[", "'size'", "]", "=", "''", "else", ":", "item", "[", "'tf'", "]", "=", "'-'", "item", "[", "'key'", "]", "=", "item", "[", "'quickkey'", "]", "item", "[", "'name'", "]", "=", "item", "[", "'filename'", "]", "print", "(", "\"{tf}{pf} {key:>15} {size:>10} {created} {name}\"", ".", "format", "(", "*", "*", "item", ")", ")", "return", "True" ]
List directory
[ "List", "directory" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L28-L50
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_upload
def do_file_upload(client, args): """Upload files""" # Sanity check if len(args.paths) > 1: # destination must be a directory try: resource = client.get_resource_by_uri(args.dest_uri) except ResourceNotFoundError: resource = None if resource and not isinstance(resource, Folder): print("file-upload: " "target '{}' is not a directory".format(args.dest_uri)) return None with client.upload_session(): for src_path in args.paths: print("Uploading {} to {}".format(src_path, args.dest_uri)) result = client.upload_file(src_path, args.dest_uri) print("Uploaded {}, result={}".format(src_path, result)) return True
python
def do_file_upload(client, args): """Upload files""" # Sanity check if len(args.paths) > 1: # destination must be a directory try: resource = client.get_resource_by_uri(args.dest_uri) except ResourceNotFoundError: resource = None if resource and not isinstance(resource, Folder): print("file-upload: " "target '{}' is not a directory".format(args.dest_uri)) return None with client.upload_session(): for src_path in args.paths: print("Uploading {} to {}".format(src_path, args.dest_uri)) result = client.upload_file(src_path, args.dest_uri) print("Uploaded {}, result={}".format(src_path, result)) return True
[ "def", "do_file_upload", "(", "client", ",", "args", ")", ":", "# Sanity check", "if", "len", "(", "args", ".", "paths", ")", ">", "1", ":", "# destination must be a directory", "try", ":", "resource", "=", "client", ".", "get_resource_by_uri", "(", "args", ".", "dest_uri", ")", "except", "ResourceNotFoundError", ":", "resource", "=", "None", "if", "resource", "and", "not", "isinstance", "(", "resource", ",", "Folder", ")", ":", "print", "(", "\"file-upload: \"", "\"target '{}' is not a directory\"", ".", "format", "(", "args", ".", "dest_uri", ")", ")", "return", "None", "with", "client", ".", "upload_session", "(", ")", ":", "for", "src_path", "in", "args", ".", "paths", ":", "print", "(", "\"Uploading {} to {}\"", ".", "format", "(", "src_path", ",", "args", ".", "dest_uri", ")", ")", "result", "=", "client", ".", "upload_file", "(", "src_path", ",", "args", ".", "dest_uri", ")", "print", "(", "\"Uploaded {}, result={}\"", ".", "format", "(", "src_path", ",", "result", ")", ")", "return", "True" ]
Upload files
[ "Upload", "files" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L53-L76
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_download
def do_file_download(client, args): """Download file""" # Sanity check if not os.path.isdir(args.dest_path) and not args.dest_path.endswith('/'): print("file-download: " "target '{}' is not a directory".format(args.dest_path)) if not os.path.exists(args.dest_path): print("\tHint: add trailing / to create one") return None for src_uri in args.uris: print("Downloading {} to {}".format(src_uri, args.dest_path)) client.download_file(src_uri, args.dest_path) print("Downloaded {}".format(src_uri)) return True
python
def do_file_download(client, args): """Download file""" # Sanity check if not os.path.isdir(args.dest_path) and not args.dest_path.endswith('/'): print("file-download: " "target '{}' is not a directory".format(args.dest_path)) if not os.path.exists(args.dest_path): print("\tHint: add trailing / to create one") return None for src_uri in args.uris: print("Downloading {} to {}".format(src_uri, args.dest_path)) client.download_file(src_uri, args.dest_path) print("Downloaded {}".format(src_uri)) return True
[ "def", "do_file_download", "(", "client", ",", "args", ")", ":", "# Sanity check", "if", "not", "os", ".", "path", ".", "isdir", "(", "args", ".", "dest_path", ")", "and", "not", "args", ".", "dest_path", ".", "endswith", "(", "'/'", ")", ":", "print", "(", "\"file-download: \"", "\"target '{}' is not a directory\"", ".", "format", "(", "args", ".", "dest_path", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "args", ".", "dest_path", ")", ":", "print", "(", "\"\\tHint: add trailing / to create one\"", ")", "return", "None", "for", "src_uri", "in", "args", ".", "uris", ":", "print", "(", "\"Downloading {} to {}\"", ".", "format", "(", "src_uri", ",", "args", ".", "dest_path", ")", ")", "client", ".", "download_file", "(", "src_uri", ",", "args", ".", "dest_path", ")", "print", "(", "\"Downloaded {}\"", ".", "format", "(", "src_uri", ")", ")", "return", "True" ]
Download file
[ "Download", "file" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L79-L95
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_show
def do_file_show(client, args): """Output file contents to stdout""" for src_uri in args.uris: client.download_file(src_uri, sys.stdout.buffer) return True
python
def do_file_show(client, args): """Output file contents to stdout""" for src_uri in args.uris: client.download_file(src_uri, sys.stdout.buffer) return True
[ "def", "do_file_show", "(", "client", ",", "args", ")", ":", "for", "src_uri", "in", "args", ".", "uris", ":", "client", ".", "download_file", "(", "src_uri", ",", "sys", ".", "stdout", ".", "buffer", ")", "return", "True" ]
Output file contents to stdout
[ "Output", "file", "contents", "to", "stdout" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L98-L103
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_folder_create
def do_folder_create(client, args): """Create directory""" for folder_uri in args.uris: client.create_folder(folder_uri, recursive=True) return True
python
def do_folder_create(client, args): """Create directory""" for folder_uri in args.uris: client.create_folder(folder_uri, recursive=True) return True
[ "def", "do_folder_create", "(", "client", ",", "args", ")", ":", "for", "folder_uri", "in", "args", ".", "uris", ":", "client", ".", "create_folder", "(", "folder_uri", ",", "recursive", "=", "True", ")", "return", "True" ]
Create directory
[ "Create", "directory" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L106-L110
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_resource_delete
def do_resource_delete(client, args): """Remove resource""" for resource_uri in args.uris: client.delete_resource(resource_uri, purge=args.purge) print("Deleted {}".format(resource_uri)) return True
python
def do_resource_delete(client, args): """Remove resource""" for resource_uri in args.uris: client.delete_resource(resource_uri, purge=args.purge) print("Deleted {}".format(resource_uri)) return True
[ "def", "do_resource_delete", "(", "client", ",", "args", ")", ":", "for", "resource_uri", "in", "args", ".", "uris", ":", "client", ".", "delete_resource", "(", "resource_uri", ",", "purge", "=", "args", ".", "purge", ")", "print", "(", "\"Deleted {}\"", ".", "format", "(", "resource_uri", ")", ")", "return", "True" ]
Remove resource
[ "Remove", "resource" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L113-L118
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_update_metadata
def do_file_update_metadata(client, args): """Update file metadata""" client.update_file_metadata(args.uri, filename=args.filename, description=args.description, mtime=args.mtime, privacy=args.privacy) return True
python
def do_file_update_metadata(client, args): """Update file metadata""" client.update_file_metadata(args.uri, filename=args.filename, description=args.description, mtime=args.mtime, privacy=args.privacy) return True
[ "def", "do_file_update_metadata", "(", "client", ",", "args", ")", ":", "client", ".", "update_file_metadata", "(", "args", ".", "uri", ",", "filename", "=", "args", ".", "filename", ",", "description", "=", "args", ".", "description", ",", "mtime", "=", "args", ".", "mtime", ",", "privacy", "=", "args", ".", "privacy", ")", "return", "True" ]
Update file metadata
[ "Update", "file", "metadata" ]
train
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L121-L126