Column schema (name, dtype, value/length stats):

id               int32   (values 0 to 252k)
repo             string  (lengths 7 to 55)
path             string  (lengths 4 to 127)
func_name        string  (lengths 1 to 88)
original_string  string  (lengths 75 to 19.8k)
language         string  (1 class: "python")
code             string  (lengths 75 to 19.8k)
code_tokens      list
docstring        string  (lengths 3 to 17.3k)
docstring_tokens list
sha              string  (lengths 40 to 40)
url              string  (lengths 87 to 242)
234,600
cs50/check50
check50/api.py
_raw
def _raw(s): """Get raw representation of s, truncating if too long.""" if isinstance(s, list): s = "\n".join(_raw(item) for item in s) if s == EOF: return "EOF" s = repr(s) # Get raw representation of string s = s[1:-1] # Strip away quotation marks if len(s) > 15: s = s[:15] + "..." # Truncate if too long return s
python
def _raw(s): """Get raw representation of s, truncating if too long.""" if isinstance(s, list): s = "\n".join(_raw(item) for item in s) if s == EOF: return "EOF" s = repr(s) # Get raw representation of string s = s[1:-1] # Strip away quotation marks if len(s) > 15: s = s[:15] + "..." # Truncate if too long return s
[ "def", "_raw", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "list", ")", ":", "s", "=", "\"\\n\"", ".", "join", "(", "_raw", "(", "item", ")", "for", "item", "in", "s", ")", "if", "s", "==", "EOF", ":", "return", "\"EOF\"", "s", "=", "repr", "(", "s", ")", "# Get raw representation of string", "s", "=", "s", "[", "1", ":", "-", "1", "]", "# Strip away quotation marks", "if", "len", "(", "s", ")", ">", "15", ":", "s", "=", "s", "[", ":", "15", "]", "+", "\"...\"", "# Truncate if too long", "return", "s" ]
Get raw representation of s, truncating if too long.
[ "Get", "raw", "representation", "of", "s", "truncating", "if", "too", "long", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/api.py#L400-L413
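A behavioral sketch of _raw (inputs hypothetical; assumes check50's EOF sentinel is in scope):

    _raw("hello")        # -> "hello" (repr'd, surrounding quotes stripped)
    _raw("a" * 20)       # -> "aaaaaaaaaaaaaaa..." (truncated to 15 characters)
    _raw(["hi", "bye"])  # -> "hi\nbye" (items joined with a newline, shown escaped by repr)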
234,601
cs50/check50
check50/api.py
_copy
def _copy(src, dst): """Copy src to dst, copying recursively if src is a directory.""" try: shutil.copy(src, dst) except IsADirectoryError: if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) shutil.copytree(src, dst)
python
def _copy(src, dst): """Copy src to dst, copying recursively if src is a directory.""" try: shutil.copy(src, dst) except IsADirectoryError: if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) shutil.copytree(src, dst)
[ "def", "_copy", "(", "src", ",", "dst", ")", ":", "try", ":", "shutil", ".", "copy", "(", "src", ",", "dst", ")", "except", "IsADirectoryError", ":", "if", "os", ".", "path", ".", "isdir", "(", "dst", ")", ":", "dst", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "os", ".", "path", ".", "basename", "(", "src", ")", ")", "shutil", ".", "copytree", "(", "src", ",", "dst", ")" ]
Copy src to dst, copying recursively if src is a directory.
[ "Copy", "src", "to", "dst", "copying", "recursively", "if", "src", "is", "a", "directory", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/api.py#L416-L423
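A minimal usage sketch (paths hypothetical). On POSIX systems shutil.copy raises IsADirectoryError when src is a directory, which is what routes directories to copytree:

    _copy("answers.txt", "workdir")  # plain file: handled by shutil.copy
    _copy("fixtures", "workdir")     # directory: falls through to
                                     # shutil.copytree("fixtures", "workdir/fixtures")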
234,602
cs50/check50
check50/api.py
run.stdin
def stdin(self, line, prompt=True, timeout=3):
    """
    Send line to stdin, optionally expect a prompt.

    :param line: line to be sent to stdin
    :type line: str
    :param prompt: boolean indicating whether a prompt is expected, if True absorbs \
        all of stdout before inserting line into stdin and raises \
        :class:`check50.Failure` if stdout is empty
    :type prompt: bool
    :param timeout: maximum number of seconds to wait for prompt
    :type timeout: int / float
    :raises check50.Failure: if ``prompt`` is set to True and no prompt is given
    """
    if line == EOF:
        log("sending EOF...")
    else:
        log(_("sending input {}...").format(line))

    if prompt:
        try:
            self.process.expect(".+", timeout=timeout)
        except (TIMEOUT, EOF):
            raise Failure(_("expected prompt for input, found none"))
        except UnicodeDecodeError:
            raise Failure(_("output not valid ASCII text"))

    try:
        if line == EOF:
            self.process.sendeof()
        else:
            self.process.sendline(line)
    except OSError:
        pass
    return self
python
[ "def", "stdin", "(", "self", ",", "line", ",", "prompt", "=", "True", ",", "timeout", "=", "3", ")", ":", "if", "line", "==", "EOF", ":", "log", "(", "\"sending EOF...\"", ")", "else", ":", "log", "(", "_", "(", "\"sending input {}...\"", ")", ".", "format", "(", "line", ")", ")", "if", "prompt", ":", "try", ":", "self", ".", "process", ".", "expect", "(", "\".+\"", ",", "timeout", "=", "timeout", ")", "except", "(", "TIMEOUT", ",", "EOF", ")", ":", "raise", "Failure", "(", "_", "(", "\"expected prompt for input, found none\"", ")", ")", "except", "UnicodeDecodeError", ":", "raise", "Failure", "(", "_", "(", "\"output not valid ASCII text\"", ")", ")", "try", ":", "if", "line", "==", "EOF", ":", "self", ".", "process", ".", "sendeof", "(", ")", "else", ":", "self", ".", "process", ".", "sendline", "(", "line", ")", "except", "OSError", ":", "pass", "return", "self" ]
Send line to stdin, optionally expect a prompt. :param line: line to be sent to stdin :type line: str :param prompt: boolean indicating whether a prompt is expected, if True absorbs \ all of stdout before inserting line into stdin and raises \ :class:`check50.Failure` if stdout is empty :type prompt: bool :param timeout: maximum number of seconds to wait for prompt :type timeout: int / float :raises check50.Failure: if ``prompt`` is set to True and no prompt is given
[ "Send", "line", "to", "stdin", "optionally", "expect", "a", "prompt", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/api.py#L165-L199
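A typical chained check using stdin (program name hypothetical; run, stdout, and exit are the surrounding check50 API):

    check50.run("python3 hello.py").stdin("David").stdout("hello, David").exit(0)

    # Sending EOF instead of a line, without waiting for a prompt:
    check50.run("./program").stdin(check50.EOF, prompt=False)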
234,603
cs50/check50
check50/api.py
run.reject
def reject(self, timeout=1):
    """
    Check that the process survives for timeout. Useful for checking whether program is waiting on input.

    :param timeout: number of seconds to wait
    :type timeout: int / float
    :raises check50.Failure: if process ends before ``timeout``
    """
    log(_("checking that input was rejected..."))
    try:
        self._wait(timeout)
    except Failure as e:
        if not isinstance(e.__cause__, TIMEOUT):
            raise
    else:
        raise Failure(_("expected program to reject input, but it did not"))
    return self
python
[ "def", "reject", "(", "self", ",", "timeout", "=", "1", ")", ":", "log", "(", "_", "(", "\"checking that input was rejected...\"", ")", ")", "try", ":", "self", ".", "_wait", "(", "timeout", ")", "except", "Failure", "as", "e", ":", "if", "not", "isinstance", "(", "e", ".", "__cause__", ",", "TIMEOUT", ")", ":", "raise", "else", ":", "raise", "Failure", "(", "_", "(", "\"expected program to reject input, but it did not\"", ")", ")", "return", "self" ]
Check that the process survives for timeout. Useful for checking whether program is waiting on input. :param timeout: number of seconds to wait :type timeout: int / float :raises check50.Failure: if process ends before ``timeout``
[ "Check", "that", "the", "process", "survives", "for", "timeout", ".", "Useful", "for", "checking", "whether", "program", "is", "waiting", "on", "input", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/api.py#L269-L286
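A usage sketch (program name hypothetical): after sending invalid input, assert the process is still alive and re-prompting rather than exiting:

    check50.run("python3 positive.py").stdin("-1").reject()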
234,604
cs50/check50
check50/internal.py
import_file
def import_file(name, path):
    """
    Import a file given a raw file path.

    :param name: Name of module to be imported
    :type name: str
    :param path: Path to Python file
    :type path: str / Path
    """
    spec = importlib.util.spec_from_file_location(name, path)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return mod
python
[ "def", "import_file", "(", "name", ",", "path", ")", ":", "spec", "=", "importlib", ".", "util", ".", "spec_from_file_location", "(", "name", ",", "path", ")", "mod", "=", "importlib", ".", "util", ".", "module_from_spec", "(", "spec", ")", "spec", ".", "loader", ".", "exec_module", "(", "mod", ")", "return", "mod" ]
Import a file given a raw file path. :param name: Name of module to be imported :type name: str :param path: Path to Python file :type path: str / Path
[ "Import", "a", "file", "given", "a", "raw", "file", "path", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/internal.py#L108-L120
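A minimal sketch (path and attribute hypothetical); this is the standard importlib recipe for loading a module from an arbitrary file:

    helpers = import_file("helpers", "/tmp/checks/helpers.py")
    helpers.lookup("foo")  # names defined in the file are attributes of the module object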
234,605
cs50/check50
check50/c.py
compile
def compile(*files, exe_name=None, cc=CC, **cflags):
    """
    Compile C source files.

    :param files: filenames to be compiled
    :param exe_name: name of resulting executable
    :param cc: compiler to use (:data:`check50.c.CC` by default)
    :param cflags: additional flags to pass to the compiler
    :raises check50.Failure: if compilation failed (i.e., if the compiler returns a non-zero exit status).
    :raises RuntimeError: if no filenames are specified

    If ``exe_name`` is None, :func:`check50.c.compile` will default to the first
    file specified sans the ``.c`` extension::

        check50.c.compile("foo.c", "bar.c")
        # clang foo.c bar.c -o foo -std=c11 -ggdb -lm

    Additional CFLAGS may be passed as keyword arguments like so::

        check50.c.compile("foo.c", "bar.c", lcs50=True)
        # clang foo.c bar.c -o foo -std=c11 -ggdb -lm -lcs50

    In the same vein, the default CFLAGS may be overridden via keyword arguments::

        check50.c.compile("foo.c", "bar.c", std="c99", lm=False)
        # clang foo.c bar.c -o foo -std=c99 -ggdb
    """
    if not files:
        raise RuntimeError(_("compile requires at least one file"))

    if exe_name is None and files[0].endswith(".c"):
        exe_name = Path(files[0]).stem

    files = " ".join(files)

    flags = CFLAGS.copy()
    flags.update(cflags)
    flags = " ".join((f"-{flag}" + (f"={value}" if value is not True else "")).replace("_", "-")
                     for flag, value in flags.items() if value)

    out_flag = f" -o {exe_name} " if exe_name is not None else " "

    run(f"{cc} {files}{out_flag}{flags}").exit(0)
python
[ "def", "compile", "(", "*", "files", ",", "exe_name", "=", "None", ",", "cc", "=", "CC", ",", "*", "*", "cflags", ")", ":", "if", "not", "files", ":", "raise", "RuntimeError", "(", "_", "(", "\"compile requires at least one file\"", ")", ")", "if", "exe_name", "is", "None", "and", "files", "[", "0", "]", ".", "endswith", "(", "\".c\"", ")", ":", "exe_name", "=", "Path", "(", "files", "[", "0", "]", ")", ".", "stem", "files", "=", "\" \"", ".", "join", "(", "files", ")", "flags", "=", "CFLAGS", ".", "copy", "(", ")", "flags", ".", "update", "(", "cflags", ")", "flags", "=", "\" \"", ".", "join", "(", "(", "f\"-{flag}\"", "+", "(", "f\"={value}\"", "if", "value", "is", "not", "True", "else", "\"\"", ")", ")", ".", "replace", "(", "\"_\"", ",", "\"-\"", ")", "for", "flag", ",", "value", "in", "flags", ".", "items", "(", ")", "if", "value", ")", "out_flag", "=", "f\" -o {exe_name} \"", "if", "exe_name", "is", "not", "None", "else", "\" \"", "run", "(", "f\"{cc} {files}{out_flag}{flags}\"", ")", ".", "exit", "(", "0", ")" ]
Compile C source files. :param files: filenames to be compiled :param exe_name: name of resulting executable :param cc: compiler to use (:data:`check50.c.CC` by default) :param cflags: additional flags to pass to the compiler :raises check50.Failure: if compilation failed (i.e., if the compiler returns a non-zero exit status). :raises RuntimeError: if no filenames are specified If ``exe_name`` is None, :func:`check50.c.compile` will default to the first file specified sans the ``.c`` extension:: check50.c.compile("foo.c", "bar.c") # clang foo.c bar.c -o foo -std=c11 -ggdb -lm Additional CFLAGS may be passed as keyword arguments like so:: check50.c.compile("foo.c", "bar.c", lcs50=True) # clang foo.c bar.c -o foo -std=c11 -ggdb -lm -lcs50 In the same vein, the default CFLAGS may be overridden via keyword arguments:: check50.c.compile("foo.c", "bar.c", std="c99", lm=False) # clang foo.c bar.c -o foo -std=c99 -ggdb
[ "Compile", "C", "source", "files", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/c.py#L16-L57
234,606
cs50/check50
check50/c.py
valgrind
def valgrind(command, env={}):
    """Run a command with valgrind.

    :param command: command to be run
    :type command: str
    :param env: environment in which to run command
    :type env: str
    :raises check50.Failure: if, at the end of the check, valgrind reports any errors

    This function works exactly like :func:`check50.run`, with the additional effect that ``command`` is
    run through ``valgrind`` and ``valgrind``'s output is automatically reviewed at the end of
    the check for memory leaks and other bugs. If ``valgrind`` reports any issues, the check is failed
    and student-friendly messages are printed to the log.

    Example usage::

        check50.c.valgrind("./leaky").stdin("foo").stdout("bar").exit(0)

    .. note::
        It is recommended that the student's code is compiled with the `-ggdb`
        flag so that additional information, such as the file and line number at which
        the issue was detected can be included in the log as well.
    """
    xml_file = tempfile.NamedTemporaryFile()
    internal.register.after_check(lambda: _check_valgrind(xml_file))

    # Ideally we'd like for this whole command not to be logged.
    return run(f"valgrind --show-leak-kinds=all --xml=yes --xml-file={xml_file.name} -- {command}", env=env)
python
[ "def", "valgrind", "(", "command", ",", "env", "=", "{", "}", ")", ":", "xml_file", "=", "tempfile", ".", "NamedTemporaryFile", "(", ")", "internal", ".", "register", ".", "after_check", "(", "lambda", ":", "_check_valgrind", "(", "xml_file", ")", ")", "# Ideally we'd like for this whole command not to be logged.", "return", "run", "(", "f\"valgrind --show-leak-kinds=all --xml=yes --xml-file={xml_file.name} -- {command}\"", ",", "env", "=", "env", ")" ]
Run a command with valgrind. :param command: command to be run :type command: str :param env: environment in which to run command :type env: str :raises check50.Failure: if, at the end of the check, valgrind reports any errors This function works exactly like :func:`check50.run`, with the additional effect that ``command`` is run through ``valgrind`` and ``valgrind``'s output is automatically reviewed at the end of the check for memory leaks and other bugs. If ``valgrind`` reports any issues, the check is failed and student-friendly messages are printed to the log. Example usage:: check50.c.valgrind("./leaky").stdin("foo").stdout("bar").exit(0) .. note:: It is recommended that the student's code is compiled with the `-ggdb` flag so that additional information, such as the file and line number at which the issue was detected can be included in the log as well.
[ "Run", "a", "command", "with", "valgrind", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/c.py#L60-L86
234,607
cs50/check50
check50/c.py
_check_valgrind
def _check_valgrind(xml_file):
    """Log and report any errors encountered by valgrind."""
    log(_("checking for valgrind errors..."))

    # Load XML file created by valgrind
    xml = ET.ElementTree(file=xml_file)

    # Ensure that we don't get duplicate error messages.
    reported = set()
    for error in xml.iterfind("error"):
        # Type of error valgrind encountered
        kind = error.find("kind").text

        # Valgrind's error message
        what = error.find("xwhat/text" if kind.startswith("Leak_") else "what").text

        # Error message that we will report
        msg = ["\t", what]

        # Find first stack frame within student's code.
        for frame in error.iterfind("stack/frame"):
            obj = frame.find("obj")
            if obj is not None and internal.run_dir in Path(obj.text).parents:
                file, line = frame.find("file"), frame.find("line")
                if file is not None and line is not None:
                    msg.append(f": ({_('file')}: {file.text}, {_('line')}: {line.text})")
                break

        msg = "".join(msg)
        if msg not in reported:
            log(msg)
            reported.add(msg)

    # Only raise exception if we encountered errors.
    if reported:
        raise Failure(_("valgrind tests failed; rerun with --log for more information."))
python
[ "def", "_check_valgrind", "(", "xml_file", ")", ":", "log", "(", "_", "(", "\"checking for valgrind errors...\"", ")", ")", "# Load XML file created by valgrind", "xml", "=", "ET", ".", "ElementTree", "(", "file", "=", "xml_file", ")", "# Ensure that we don't get duplicate error messages.", "reported", "=", "set", "(", ")", "for", "error", "in", "xml", ".", "iterfind", "(", "\"error\"", ")", ":", "# Type of error valgrind encountered", "kind", "=", "error", ".", "find", "(", "\"kind\"", ")", ".", "text", "# Valgrind's error message", "what", "=", "error", ".", "find", "(", "\"xwhat/text\"", "if", "kind", ".", "startswith", "(", "\"Leak_\"", ")", "else", "\"what\"", ")", ".", "text", "# Error message that we will report", "msg", "=", "[", "\"\\t\"", ",", "what", "]", "# Find first stack frame within student's code.", "for", "frame", "in", "error", ".", "iterfind", "(", "\"stack/frame\"", ")", ":", "obj", "=", "frame", ".", "find", "(", "\"obj\"", ")", "if", "obj", "is", "not", "None", "and", "internal", ".", "run_dir", "in", "Path", "(", "obj", ".", "text", ")", ".", "parents", ":", "file", ",", "line", "=", "frame", ".", "find", "(", "\"file\"", ")", ",", "frame", ".", "find", "(", "\"line\"", ")", "if", "file", "is", "not", "None", "and", "line", "is", "not", "None", ":", "msg", ".", "append", "(", "f\": ({_('file')}: {file.text}, {_('line')}: {line.text})\"", ")", "break", "msg", "=", "\"\"", ".", "join", "(", "msg", ")", "if", "msg", "not", "in", "reported", ":", "log", "(", "msg", ")", "reported", ".", "add", "(", "msg", ")", "# Only raise exception if we encountered errors.", "if", "reported", ":", "raise", "Failure", "(", "_", "(", "\"valgrind tests failed; rerun with --log for more information.\"", ")", ")" ]
Log and report any errors encountered by valgrind.
[ "Log", "and", "report", "any", "errors", "encountered", "by", "valgrind", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/c.py#L89-L124
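A simplified sketch of the valgrind XML shape this parser walks; element names come from the code above, text values are hypothetical. Leak errors carry their message in xwhat/text, other kinds in what:

    <error>
      <kind>Leak_DefinitelyLost</kind>
      <xwhat><text>8 bytes in 1 blocks are definitely lost</text></xwhat>
      <stack>
        <frame><obj>/run_dir/leaky</obj><file>leaky.c</file><line>12</line></frame>
      </stack>
    </error>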
234,608
cs50/check50
check50/runner.py
_timeout
def _timeout(seconds):
    """Context manager that runs code block until timeout is reached.

    Example usage::

        try:
            with _timeout(10):
                do_stuff()
        except Timeout:
            print("do_stuff timed out")
    """
    def _handle_timeout(*args):
        raise Timeout(seconds)

    signal.signal(signal.SIGALRM, _handle_timeout)
    signal.alarm(seconds)
    try:
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, signal.SIG_DFL)
python
[ "def", "_timeout", "(", "seconds", ")", ":", "def", "_handle_timeout", "(", "*", "args", ")", ":", "raise", "Timeout", "(", "seconds", ")", "signal", ".", "signal", "(", "signal", ".", "SIGALRM", ",", "_handle_timeout", ")", "signal", ".", "alarm", "(", "seconds", ")", "try", ":", "yield", "finally", ":", "signal", ".", "alarm", "(", "0", ")", "signal", ".", "signal", "(", "signal", ".", "SIGALRM", ",", "signal", ".", "SIG_DFL", ")" ]
Context manager that runs code block until timeout is reached. Example usage:: try: with _timeout(10): do_stuff() except Timeout: print("do_stuff timed out")
[ "Context", "manager", "that", "runs", "code", "block", "until", "timeout", "is", "reached", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/runner.py#L52-L73
234,609
cs50/check50
check50/runner.py
CheckRunner.run
def run(self, files, working_area):
    """
    Run checks concurrently.
    Returns a list of CheckResults ordered by declaration order of the checks in the imported module
    """
    # Ensure that dictionary is ordered by check declaration order (via self.check_names)
    # NOTE: Requires CPython 3.6. If we need to support older versions of Python, replace with OrderedDict.
    results = {name: None for name in self.check_names}
    checks_root = working_area.parent

    with futures.ProcessPoolExecutor() as executor:
        # Start all checks that have no dependencies
        not_done = set(executor.submit(run_check(name, self.checks_spec, checks_root))
                       for name, _ in self.child_map[None])
        not_passed = []

        while not_done:
            done, not_done = futures.wait(not_done, return_when=futures.FIRST_COMPLETED)
            for future in done:
                # Get result from completed check
                result, state = future.result()
                results[result.name] = result
                if result.passed:
                    # Dispatch dependent checks
                    for child_name, _ in self.child_map[result.name]:
                        not_done.add(executor.submit(
                            run_check(child_name, self.checks_spec, checks_root, state)))
                else:
                    not_passed.append(result.name)

    for name in not_passed:
        self._skip_children(name, results)

    return results.values()
python
[ "def", "run", "(", "self", ",", "files", ",", "working_area", ")", ":", "# Ensure that dictionary is ordered by check declaration order (via self.check_names)", "# NOTE: Requires CPython 3.6. If we need to support older versions of Python, replace with OrderedDict.", "results", "=", "{", "name", ":", "None", "for", "name", "in", "self", ".", "check_names", "}", "checks_root", "=", "working_area", ".", "parent", "with", "futures", ".", "ProcessPoolExecutor", "(", ")", "as", "executor", ":", "# Start all checks that have no dependencies", "not_done", "=", "set", "(", "executor", ".", "submit", "(", "run_check", "(", "name", ",", "self", ".", "checks_spec", ",", "checks_root", ")", ")", "for", "name", ",", "_", "in", "self", ".", "child_map", "[", "None", "]", ")", "not_passed", "=", "[", "]", "while", "not_done", ":", "done", ",", "not_done", "=", "futures", ".", "wait", "(", "not_done", ",", "return_when", "=", "futures", ".", "FIRST_COMPLETED", ")", "for", "future", "in", "done", ":", "# Get result from completed check", "result", ",", "state", "=", "future", ".", "result", "(", ")", "results", "[", "result", ".", "name", "]", "=", "result", "if", "result", ".", "passed", ":", "# Dispatch dependent checks", "for", "child_name", ",", "_", "in", "self", ".", "child_map", "[", "result", ".", "name", "]", ":", "not_done", ".", "add", "(", "executor", ".", "submit", "(", "run_check", "(", "child_name", ",", "self", ".", "checks_spec", ",", "checks_root", ",", "state", ")", ")", ")", "else", ":", "not_passed", ".", "append", "(", "result", ".", "name", ")", "for", "name", "in", "not_passed", ":", "self", ".", "_skip_children", "(", "name", ",", "results", ")", "return", "results", ".", "values", "(", ")" ]
Run checks concurrently. Returns a list of CheckResults ordered by declaration order of the checks in the imported module
[ "Run", "checks", "concurrently", ".", "Returns", "a", "list", "of", "CheckResults", "ordered", "by", "declaration", "order", "of", "the", "checks", "in", "the", "imported", "module" ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/runner.py#L178-L212
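A sketch of the check declarations that populate child_map (check names hypothetical; check50's @check50.check decorator takes an optional dependency). Checks with no dependency land in child_map[None] and are submitted immediately; children are dispatched only after their parent passes:

    import check50

    @check50.check()          # no dependency: a root check
    def exists():
        """program.py exists"""
        check50.exists("program.py")

    @check50.check(exists)    # runs only if `exists` passes
    def runs():
        """program.py runs"""
        check50.run("python3 program.py").exit(0)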
234,610
cs50/check50
check50/py.py
append_code
def append_code(original, codefile):
    """Append the contents of one file to another.

    :param original: name of file that will be appended to
    :type original: str
    :param codefile: name of file that will be appended
    :type codefile: str

    This function is particularly useful when one wants to replace a function
    in student code with their own implementation of one. If two functions are
    defined with the same name in Python, the latter definition is taken so
    overwriting a function is as simple as writing it to a file and then
    appending it to the student's code.

    Example usage::

        # Include a file containing our own implementation of a lookup function.
        check50.include("lookup.py")

        # Overwrite the lookup function in helpers.py with our own implementation.
        check50.py.append_code("helpers.py", "lookup.py")
    """
    with open(codefile) as code, open(original, "a") as o:
        o.write("\n")
        o.writelines(code)
python
[ "def", "append_code", "(", "original", ",", "codefile", ")", ":", "with", "open", "(", "codefile", ")", "as", "code", ",", "open", "(", "original", ",", "\"a\"", ")", "as", "o", ":", "o", ".", "write", "(", "\"\\n\"", ")", "o", ".", "writelines", "(", "code", ")" ]
Append the contents of one file to another. :param original: name of file that will be appended to :type original: str :param codefile: name of file that will be appended :type codefile: str This function is particularly useful when one wants to replace a function in student code with their own implementation of one. If two functions are defined with the same name in Python, the latter definition is taken so overwriting a function is as simple as writing it to a file and then appending it to the student's code. Example usage:: # Include a file containing our own implementation of a lookup function. check50.include("lookup.py") # Overwrite the lookup function in helpers.py with our own implementation. check50.py.append_code("helpers.py", "lookup.py")
[ "Append", "the", "contents", "of", "one", "file", "to", "another", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/py.py#L10-L34
234,611
cs50/check50
check50/py.py
import_
def import_(path):
    """Import a Python program given a raw file path

    :param path: path to python file to be imported
    :type path: str
    :raises check50.Failure: if ``path`` doesn't exist, or if the Python file at ``path`` throws an exception when imported.
    """
    exists(path)
    log(_("importing {}...").format(path))
    name = Path(path).stem
    try:
        return internal.import_file(name, path)
    except Exception as e:
        raise Failure(str(e))
python
[ "def", "import_", "(", "path", ")", ":", "exists", "(", "path", ")", "log", "(", "_", "(", "\"importing {}...\"", ")", ".", "format", "(", "path", ")", ")", "name", "=", "Path", "(", "path", ")", ".", "stem", "try", ":", "return", "internal", ".", "import_file", "(", "name", ",", "path", ")", "except", "Exception", "as", "e", ":", "raise", "Failure", "(", "str", "(", "e", ")", ")" ]
Import a Python program given a raw file path :param path: path to python file to be imported :type path: str :raises check50.Failure: if ``path`` doesn't exist, or if the Python file at ``path`` throws an exception when imported.
[ "Import", "a", "Python", "program", "given", "a", "raw", "file", "path" ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/py.py#L37-L50
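A usage sketch (file and function names hypothetical): import the student's module, then call into it from the check:

    module = check50.py.import_("fibonacci.py")
    if module.fib(10) != 55:
        raise check50.Failure("expected fib(10) to be 55")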
234,612
cs50/check50
check50/py.py
compile
def compile(file):
    """
    Compile a Python program into byte code

    :param file: file to be compiled
    :raises check50.Failure: if compilation fails e.g. if there is a SyntaxError
    """
    log(_("compiling {} into byte code...").format(file))

    try:
        py_compile.compile(file, doraise=True)
    except py_compile.PyCompileError as e:
        log(_("Exception raised: "))
        for line in e.msg.splitlines():
            log(line)

        raise Failure(_("{} raised while compiling {} (rerun with --log for more details)").format(e.exc_type_name, file))
python
[ "def", "compile", "(", "file", ")", ":", "log", "(", "_", "(", "\"compiling {} into byte code...\"", ")", ".", "format", "(", "file", ")", ")", "try", ":", "py_compile", ".", "compile", "(", "file", ",", "doraise", "=", "True", ")", "except", "py_compile", ".", "PyCompileError", "as", "e", ":", "log", "(", "_", "(", "\"Exception raised: \"", ")", ")", "for", "line", "in", "e", ".", "msg", ".", "splitlines", "(", ")", ":", "log", "(", "line", ")", "raise", "Failure", "(", "_", "(", "\"{} raised while compiling {} (rerun with --log for more details)\"", ")", ".", "format", "(", "e", ".", "exc_type_name", ",", "file", ")", ")" ]
Compile a Python program into byte code :param file: file to be compiled :raises check50.Failure: if compilation fails e.g. if there is a SyntaxError
[ "Compile", "a", "Python", "program", "into", "byte", "code" ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/py.py#L52-L68
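A usage sketch (file name hypothetical); the check fails with a student-friendly message if, say, the file contains a SyntaxError:

    check50.py.compile("hello.py")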
234,613
cs50/check50
check50/flask.py
app.get
def get(self, route, data=None, params=None, follow_redirects=True):
    """Send GET request to app.

    :param route: route to send request to
    :type route: str
    :param data: form data to include in request
    :type data: dict
    :param params: URL parameters to include in request
    :param follow_redirects: enable redirection (defaults to ``True``)
    :type follow_redirects: bool
    :returns: ``self``
    :raises check50.Failure: if Flask application throws an uncaught exception

    Example usage::

        check50.flask.app("application.py").get("/buy", params={"q": "02138"}).content()
    """
    return self._send("GET", route, data, params, follow_redirects=follow_redirects)
python
[ "def", "get", "(", "self", ",", "route", ",", "data", "=", "None", ",", "params", "=", "None", ",", "follow_redirects", "=", "True", ")", ":", "return", "self", ".", "_send", "(", "\"GET\"", ",", "route", ",", "data", ",", "params", ",", "follow_redirects", "=", "follow_redirects", ")" ]
Send GET request to app. :param route: route to send request to :type route: str :param data: form data to include in request :type data: dict :param params: URL parameters to include in request :param follow_redirects: enable redirection (defaults to ``True``) :type follow_redirects: bool :returns: ``self`` :raises check50.Failure: if Flask application throws an uncaught exception Example usage:: check50.flask.app("application.py").get("/buy", params={"q": "02138"}).content()
[ "Send", "GET", "request", "to", "app", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/flask.py#L52-L69
234,614
cs50/check50
check50/flask.py
app.post
def post(self, route, data=None, params=None, follow_redirects=True):
    """Send POST request to app.

    :param route: route to send request to
    :type route: str
    :param data: form data to include in request
    :type data: dict
    :param params: URL parameters to include in request
    :param follow_redirects: enable redirection (defaults to ``True``)
    :type follow_redirects: bool
    :raises check50.Failure: if Flask application throws an uncaught exception

    Example usage::

        check50.flask.app("application.py").post("/buy", data={"symbol": "GOOG", "shares": 10}).status(200)
    """
    return self._send("POST", route, data, params, follow_redirects=follow_redirects)
python
[ "def", "post", "(", "self", ",", "route", ",", "data", "=", "None", ",", "params", "=", "None", ",", "follow_redirects", "=", "True", ")", ":", "return", "self", ".", "_send", "(", "\"POST\"", ",", "route", ",", "data", ",", "params", ",", "follow_redirects", "=", "follow_redirects", ")" ]
Send POST request to app. :param route: route to send request to :type route: str :param data: form data to include in request :type data: dict :param params: URL parameters to include in request :param follow_redirects: enable redirection (defaults to ``True``) :type follow_redirects: bool :raises check50.Failure: if Flask application throws an uncaught exception Example usage:: check50.flask.app("application.py").post("/buy", data={"symbol": "GOOG", "shares": 10}).status(200)
[ "Send", "POST", "request", "to", "app", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/flask.py#L71-L88
234,615
cs50/check50
check50/flask.py
app.status
def status(self, code=None):
    """Check status code in response returned by application.
    If ``code`` is not None, assert that ``code`` is returned by application,
    else simply return the status code.

    :param code: ``code`` to assert that application returns
    :type code: int

    Example usage::

        check50.flask.app("application.py").status(200)

        status = check50.flask.app("application.py").get("/").status()
        if status != 200:
            raise check50.Failure(f"expected status code 200, but got {status}")
    """
    if code is None:
        return self.response.status_code

    log(_("checking that status code {} is returned...").format(code))
    if code != self.response.status_code:
        raise Failure(_("expected status code {}, but got {}").format(
            code, self.response.status_code))
    return self
python
[ "def", "status", "(", "self", ",", "code", "=", "None", ")", ":", "if", "code", "is", "None", ":", "return", "self", ".", "response", ".", "status_code", "log", "(", "_", "(", "\"checking that status code {} is returned...\"", ")", ".", "format", "(", "code", ")", ")", "if", "code", "!=", "self", ".", "response", ".", "status_code", ":", "raise", "Failure", "(", "_", "(", "\"expected status code {}, but got {}\"", ")", ".", "format", "(", "code", ",", "self", ".", "response", ".", "status_code", ")", ")", "return", "self" ]
Check status code in response returned by application. If ``code`` is not None, assert that ``code`` is returned by application, else simply return the status code. :param code: ``code`` to assert that application returns :type code: int Example usage:: check50.flask.app("application.py").status(200) status = check50.flask.app("application.py").get("/").status() if status != 200: raise check50.Failure(f"expected status code 200, but got {status}")
[ "Check", "status", "code", "in", "response", "returned", "by", "application", ".", "If", "code", "is", "not", "None", "assert", "that", "code", "is", "returned", "by", "application", "else", "simply", "return", "the", "status", "code", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/flask.py#L90-L114
234,616
cs50/check50
check50/flask.py
app.raw_content
def raw_content(self, output=None, str_output=None):
    """Searches for `output` regex match within content of page, regardless of mimetype."""
    return self._search_page(output, str_output, self.response.data,
                             lambda regex, content: regex.search(content.decode()))
python
[ "def", "raw_content", "(", "self", ",", "output", "=", "None", ",", "str_output", "=", "None", ")", ":", "return", "self", ".", "_search_page", "(", "output", ",", "str_output", ",", "self", ".", "response", ".", "data", ",", "lambda", "regex", ",", "content", ":", "regex", ".", "search", "(", "content", ".", "decode", "(", ")", ")", ")" ]
Searches for `output` regex match within content of page, regardless of mimetype.
[ "Searches", "for", "output", "regex", "match", "within", "content", "of", "page", "regardless", "of", "mimetype", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/flask.py#L116-L118
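A usage sketch (route and pattern hypothetical); unlike content, this works for any mimetype, e.g. plain text:

    check50.flask.app("application.py").get("/robots.txt").raw_content("User-agent")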
234,617
cs50/check50
check50/flask.py
app.content
def content(self, output=None, str_output=None, **kwargs):
    """Searches for `output` regex within HTML page. kwargs are passed to BeautifulSoup's find function to filter for tags."""
    if self.response.mimetype != "text/html":
        raise Failure(_("expected request to return HTML, but it returned {}").format(
            self.response.mimetype))

    # TODO: Remove once beautiful soup updates to accomodate python 3.7
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        content = BeautifulSoup(self.response.data, "html.parser")

    return self._search_page(
        output,
        str_output,
        content,
        lambda regex, content: any(regex.search(str(tag)) for tag in content.find_all(**kwargs)))
python
[ "def", "content", "(", "self", ",", "output", "=", "None", ",", "str_output", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "response", ".", "mimetype", "!=", "\"text/html\"", ":", "raise", "Failure", "(", "_", "(", "\"expected request to return HTML, but it returned {}\"", ")", ".", "format", "(", "self", ".", "response", ".", "mimetype", ")", ")", "# TODO: Remove once beautiful soup updates to accomodate python 3.7", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "\"ignore\"", ",", "category", "=", "DeprecationWarning", ")", "content", "=", "BeautifulSoup", "(", "self", ".", "response", ".", "data", ",", "\"html.parser\"", ")", "return", "self", ".", "_search_page", "(", "output", ",", "str_output", ",", "content", ",", "lambda", "regex", ",", "content", ":", "any", "(", "regex", ".", "search", "(", "str", "(", "tag", ")", ")", "for", "tag", "in", "content", ".", "find_all", "(", "*", "*", "kwargs", ")", ")", ")" ]
Searches for `output` regex within HTML page. kwargs are passed to BeautifulSoup's find function to filter for tags.
[ "Searches", "for", "output", "regex", "within", "HTML", "page", ".", "kwargs", "are", "passed", "to", "BeautifulSoup", "s", "find", "function", "to", "filter", "for", "tags", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/flask.py#L120-L135
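A usage sketch (route and markup hypothetical); the kwargs narrow which tags the regex is applied to, via BeautifulSoup's find_all:

    check50.flask.app("application.py").get("/").content("Hello, world", name="h1")
    # passes if the markup of any <h1> tag matches the regex "Hello, world"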
234,618
cs50/check50
check50/flask.py
app._send
def _send(self, method, route, data, params, **kwargs):
    """Send request of type `method` to `route`."""
    route = self._fmt_route(route, params)
    log(_("sending {} request to {}").format(method.upper(), route))
    try:
        self.response = getattr(self._client, method.lower())(route, data=data, **kwargs)
    except BaseException as e:  # Catch all exceptions thrown by app
        log(_("exception raised in application: {}: {}").format(type(e).__name__, e))
        raise Failure(_("application raised an exception (rerun with --log for more details)"))
    return self
python
[ "def", "_send", "(", "self", ",", "method", ",", "route", ",", "data", ",", "params", ",", "*", "*", "kwargs", ")", ":", "route", "=", "self", ".", "_fmt_route", "(", "route", ",", "params", ")", "log", "(", "_", "(", "\"sending {} request to {}\"", ")", ".", "format", "(", "method", ".", "upper", "(", ")", ",", "route", ")", ")", "try", ":", "self", ".", "response", "=", "getattr", "(", "self", ".", "_client", ",", "method", ".", "lower", "(", ")", ")", "(", "route", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", "except", "BaseException", "as", "e", ":", "# Catch all exceptions thrown by app", "log", "(", "_", "(", "\"exception raised in application: {}: {}\"", ")", ".", "format", "(", "type", "(", "e", ")", ".", "__name__", ",", "e", ")", ")", "raise", "Failure", "(", "_", "(", "\"application raised an exception (rerun with --log for more details)\"", ")", ")", "return", "self" ]
Send request of type `method` to `route`.
[ "Send", "request", "of", "type", "method", "to", "route", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/flask.py#L137-L146
234,619
cs50/check50
check50/simple.py
compile
def compile(checks):
    """Returns compiled check50 checks from simple YAML checks in path."""
    out = ["import check50"]

    for name, check in checks.items():
        out.append(_compile_check(name, check))

    return "\n\n".join(out)
python
[ "def", "compile", "(", "checks", ")", ":", "out", "=", "[", "\"import check50\"", "]", "for", "name", ",", "check", "in", "checks", ".", "items", "(", ")", ":", "out", ".", "append", "(", "_compile_check", "(", "name", ",", "check", ")", ")", "return", "\"\\n\\n\"", ".", "join", "(", "out", ")" ]
Returns compiled check50 checks from simple YAML checks in path.
[ "Returns", "compiled", "check50", "checks", "from", "simple", "YAML", "checks", "in", "path", "." ]
42c1f0c36baa6a24f69742d74551a9ea7a5ceb33
https://github.com/cs50/check50/blob/42c1f0c36baa6a24f69742d74551a9ea7a5ceb33/check50/simple.py#L8-L16
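A minimal sketch of the contract visible in the code (the file name, YAML key, and loading shown here are hypothetical; `checks` is the mapping parsed from check50's "simple" YAML check format):

    import yaml

    with open(".cs50.yaml") as f:
        checks = yaml.safe_load(f)["checks"]

    source = compile(checks)
    # source begins with "import check50" followed by one compiled
    # check per entry, separated by blank lines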
234,620
quantopian/trading_calendars
trading_calendars/utils/pandas_utils.py
days_at_time
def days_at_time(days, t, tz, day_offset=0):
    """
    Create an index of days at time ``t``, interpreted in timezone ``tz``.

    The returned index is localized to UTC.

    Parameters
    ----------
    days : DatetimeIndex
        An index of dates (represented as midnight).
    t : datetime.time
        The time to apply as an offset to each day in ``days``.
    tz : pytz.timezone
        The timezone to use to interpret ``t``.
    day_offset : int
        The number of days we want to offset @days by

    Examples
    --------
    In the example below, the times switch from 13:45 to 12:45 UTC because
    March 13th is the daylight savings transition for US/Eastern. All the
    times are still 8:45 when interpreted in US/Eastern.

    >>> import pandas as pd; import datetime; import pprint
    >>> dts = pd.date_range('2016-03-12', '2016-03-14')
    >>> dts_at_845 = days_at_time(dts, datetime.time(8, 45), 'US/Eastern')
    >>> pprint.pprint([str(dt) for dt in dts_at_845])
    ['2016-03-12 13:45:00+00:00',
     '2016-03-13 12:45:00+00:00',
     '2016-03-14 12:45:00+00:00']
    """
    days = pd.DatetimeIndex(days).tz_localize(None)

    if len(days) == 0:
        return days.tz_localize(UTC)

    # Offset days without tz to avoid timezone issues.
    delta = pd.Timedelta(
        days=day_offset,
        hours=t.hour,
        minutes=t.minute,
        seconds=t.second,
    )
    return (days + delta).tz_localize(tz).tz_convert(UTC)
python
[ "def", "days_at_time", "(", "days", ",", "t", ",", "tz", ",", "day_offset", "=", "0", ")", ":", "days", "=", "pd", ".", "DatetimeIndex", "(", "days", ")", ".", "tz_localize", "(", "None", ")", "if", "len", "(", "days", ")", "==", "0", ":", "return", "days", ".", "tz_localize", "(", "UTC", ")", "# Offset days without tz to avoid timezone issues.", "delta", "=", "pd", ".", "Timedelta", "(", "days", "=", "day_offset", ",", "hours", "=", "t", ".", "hour", ",", "minutes", "=", "t", ".", "minute", ",", "seconds", "=", "t", ".", "second", ",", ")", "return", "(", "days", "+", "delta", ")", ".", "tz_localize", "(", "tz", ")", ".", "tz_convert", "(", "UTC", ")" ]
Create an index of days at time ``t``, interpreted in timezone ``tz``. The returned index is localized to UTC. Parameters ---------- days : DatetimeIndex An index of dates (represented as midnight). t : datetime.time The time to apply as an offset to each day in ``days``. tz : pytz.timezone The timezone to use to interpret ``t``. day_offset : int The number of days we want to offset @days by Examples -------- In the example below, the times switch from 13:45 to 12:45 UTC because March 13th is the daylight savings transition for US/Eastern. All the times are still 8:45 when interpreted in US/Eastern. >>> import pandas as pd; import datetime; import pprint >>> dts = pd.date_range('2016-03-12', '2016-03-14') >>> dts_at_845 = days_at_time(dts, datetime.time(8, 45), 'US/Eastern') >>> pprint.pprint([str(dt) for dt in dts_at_845]) ['2016-03-12 13:45:00+00:00', '2016-03-13 12:45:00+00:00', '2016-03-14 12:45:00+00:00']
[ "Create", "an", "index", "of", "days", "at", "time", "t", "interpreted", "in", "timezone", "tz", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/utils/pandas_utils.py#L6-L48
234,621
quantopian/trading_calendars
trading_calendars/common_holidays.py
weekend_boxing_day
def weekend_boxing_day(start_date=None, end_date=None, observance=None):
    """
    If Boxing Day is a Saturday, then Monday the 28th is a holiday.
    If Boxing Day is a Sunday, then Tuesday the 28th is a holiday.
    """
    return Holiday(
        "Weekend Boxing Day",
        month=12,
        day=28,
        days_of_week=(MONDAY, TUESDAY),
        start_date=start_date,
        end_date=end_date,
        observance=observance,
    )
python
[ "def", "weekend_boxing_day", "(", "start_date", "=", "None", ",", "end_date", "=", "None", ",", "observance", "=", "None", ")", ":", "return", "Holiday", "(", "\"Weekend Boxing Day\"", ",", "month", "=", "12", ",", "day", "=", "28", ",", "days_of_week", "=", "(", "MONDAY", ",", "TUESDAY", ")", ",", "start_date", "=", "start_date", ",", "end_date", "=", "end_date", ",", "observance", "=", "observance", ",", ")" ]
If Boxing Day is a Saturday, then Monday the 28th is a holiday. If Boxing Day is a Sunday, then Tuesday the 28th is a holiday.
[ "If", "boxing", "day", "is", "saturday", "then", "Monday", "28th", "is", "a", "holiday", "If", "boxing", "day", "is", "sunday", "then", "Tuesday", "28th", "is", "a", "holiday" ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/common_holidays.py#L261-L274
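A usage sketch (assuming the package is installed; the module path comes from the record above). The rule fires only in years where December 28th lands on a Monday or Tuesday, i.e. exactly the years in which Boxing Day fell on a weekend.

import pandas as pd
from trading_calendars.common_holidays import weekend_boxing_day

rule = weekend_boxing_day()
# 2015-12-26 was a Saturday, so Monday 2015-12-28 is selected; in 2016 and
# 2017 Boxing Day fell on a weekday, so December 28th is skipped.
print(rule.dates(pd.Timestamp('2015-01-01'), pd.Timestamp('2017-12-31')))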
234,622
quantopian/trading_calendars
trading_calendars/xtks_holidays.py
is_holiday_or_weekend
def is_holiday_or_weekend(holidays, dt): """ Given a list of holidays, return whether dt is a holiday or it is on a weekend. """ one_day = timedelta(days=1) for h in holidays: if dt in h.dates(dt - one_day, dt + one_day) or \ dt.weekday() in WEEKENDS: return True return False
python
def is_holiday_or_weekend(holidays, dt): """ Given a list of holidays, return whether dt is a holiday or it is on a weekend. """ one_day = timedelta(days=1) for h in holidays: if dt in h.dates(dt - one_day, dt + one_day) or \ dt.weekday() in WEEKENDS: return True return False
[ "def", "is_holiday_or_weekend", "(", "holidays", ",", "dt", ")", ":", "one_day", "=", "timedelta", "(", "days", "=", "1", ")", "for", "h", "in", "holidays", ":", "if", "dt", "in", "h", ".", "dates", "(", "dt", "-", "one_day", ",", "dt", "+", "one_day", ")", "or", "dt", ".", "weekday", "(", ")", "in", "WEEKENDS", ":", "return", "True", "return", "False" ]
Given a list of holidays, return whether dt is a holiday or it is on a weekend.
[ "Given", "a", "list", "of", "holidays", "return", "whether", "dt", "is", "a", "holiday", "or", "it", "is", "on", "a", "weekend", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/xtks_holidays.py#L18-L30
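A quick check of the helper (import path from the record above; the holiday rule here is illustrative):

import pandas as pd
from pandas.tseries.holiday import Holiday
from trading_calendars.xtks_holidays import is_holiday_or_weekend

new_years = Holiday('New Year', month=1, day=1)
print(is_holiday_or_weekend([new_years], pd.Timestamp('2019-01-01')))  # True: holiday
print(is_holiday_or_weekend([new_years], pd.Timestamp('2019-01-05')))  # True: Saturday
print(is_holiday_or_weekend([new_years], pd.Timestamp('2019-01-02')))  # False: ordinary Wednesday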
234,623
quantopian/trading_calendars
trading_calendars/xtks_holidays.py
next_non_holiday_weekday
def next_non_holiday_weekday(holidays, dt): """ If a holiday falls on a Sunday, observe it on the next non-holiday weekday. Parameters ---------- holidays : list[pd.tseries.holiday.Holiday] list of holidays dt : pd.Timestamp date of holiday. """ day_of_week = dt.weekday() if day_of_week == SUNDAY: while is_holiday_or_weekend(holidays, dt): dt += timedelta(1) return dt
python
def next_non_holiday_weekday(holidays, dt): """ If a holiday falls on a Sunday, observe it on the next non-holiday weekday. Parameters ---------- holidays : list[pd.tseries.holiday.Holiday] list of holidays dt : pd.Timestamp date of holiday. """ day_of_week = dt.weekday() if day_of_week == SUNDAY: while is_holiday_or_weekend(holidays, dt): dt += timedelta(1) return dt
[ "def", "next_non_holiday_weekday", "(", "holidays", ",", "dt", ")", ":", "day_of_week", "=", "dt", ".", "weekday", "(", ")", "if", "day_of_week", "==", "SUNDAY", ":", "while", "is_holiday_or_weekend", "(", "holidays", ",", "dt", ")", ":", "dt", "+=", "timedelta", "(", "1", ")", "return", "dt" ]
If a holiday falls on a Sunday, observe it on the next non-holiday weekday. Parameters ---------- holidays : list[pd.tseries.holiday.Holiday] list of holidays dt : pd.Timestamp date of holiday.
[ "If", "a", "holiday", "falls", "on", "a", "Sunday", "observe", "it", "on", "the", "next", "non", "-", "holiday", "weekday", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/xtks_holidays.py#L33-L50
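Continuing the sketch above: because 2017-01-01 fell on a Sunday, the observance slides past both the holiday itself and the weekend onto the next clear weekday.

import pandas as pd
from pandas.tseries.holiday import Holiday
from trading_calendars.xtks_holidays import next_non_holiday_weekday

new_years = Holiday('New Year', month=1, day=1)
# Sunday 2017-01-01 -> observed Monday 2017-01-02.
print(next_non_holiday_weekday([new_years], pd.Timestamp('2017-01-01')))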
234,624
quantopian/trading_calendars
trading_calendars/calendar_helpers.py
compute_all_minutes
def compute_all_minutes(opens_in_ns, closes_in_ns):
    """
    Given arrays of opens and closes, both in nanoseconds,
    return an array of each minute between the opens and closes.
    """
    deltas = closes_in_ns - opens_in_ns

    # + 1 because we want 390 mins per standard day, not 389
    daily_sizes = (deltas // NANOSECONDS_PER_MINUTE) + 1
    num_minutes = daily_sizes.sum()

    # Build each day's minutes and concatenate them. This assumes that each
    # day represents a contiguous block of minutes.
    pieces = []
    for open_, size in zip(opens_in_ns, daily_sizes):
        pieces.append(
            np.arange(open_,
                      open_ + size * NANOSECONDS_PER_MINUTE,
                      NANOSECONDS_PER_MINUTE)
        )
    out = np.concatenate(pieces).view('datetime64[ns]')
    assert len(out) == num_minutes
    return out
python
def compute_all_minutes(opens_in_ns, closes_in_ns):
    """
    Given arrays of opens and closes, both in nanoseconds,
    return an array of each minute between the opens and closes.
    """
    deltas = closes_in_ns - opens_in_ns

    # + 1 because we want 390 mins per standard day, not 389
    daily_sizes = (deltas // NANOSECONDS_PER_MINUTE) + 1
    num_minutes = daily_sizes.sum()

    # Build each day's minutes and concatenate them. This assumes that each
    # day represents a contiguous block of minutes.
    pieces = []
    for open_, size in zip(opens_in_ns, daily_sizes):
        pieces.append(
            np.arange(open_,
                      open_ + size * NANOSECONDS_PER_MINUTE,
                      NANOSECONDS_PER_MINUTE)
        )
    out = np.concatenate(pieces).view('datetime64[ns]')
    assert len(out) == num_minutes
    return out
[ "def", "compute_all_minutes", "(", "opens_in_ns", ",", "closes_in_ns", ")", ":", "deltas", "=", "closes_in_ns", "-", "opens_in_ns", "# + 1 because we want 390 mins per standard day, not 389", "daily_sizes", "=", "(", "deltas", "//", "NANOSECONDS_PER_MINUTE", ")", "+", "1", "num_minutes", "=", "daily_sizes", ".", "sum", "(", ")", "# One allocation for the entire thing. This assumes that each day", "# represents a contiguous block of minutes.", "pieces", "=", "[", "]", "for", "open_", ",", "size", "in", "zip", "(", "opens_in_ns", ",", "daily_sizes", ")", ":", "pieces", ".", "append", "(", "np", ".", "arange", "(", "open_", ",", "open_", "+", "size", "*", "NANOSECONDS_PER_MINUTE", ",", "NANOSECONDS_PER_MINUTE", ")", ")", "out", "=", "np", ".", "concatenate", "(", "pieces", ")", ".", "view", "(", "'datetime64[ns]'", ")", "assert", "len", "(", "out", ")", "==", "num_minutes", "return", "out" ]
Given arrays of opens and closes, both in nanoseconds, return an array of each minute between the opens and closes.
[ "Given", "arrays", "of", "opens", "and", "closes", "both", "in", "nanoseconds", "return", "an", "array", "of", "each", "minute", "between", "the", "opens", "and", "closes", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_helpers.py#L47-L71
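A small check of the inclusive-endpoint arithmetic (import path from the record above):

import numpy as np
import pandas as pd
from trading_calendars.calendar_helpers import compute_all_minutes

opens = np.array([pd.Timestamp('2019-01-02 14:31', tz='UTC').value])
closes = np.array([pd.Timestamp('2019-01-02 21:00', tz='UTC').value])
minutes = compute_all_minutes(opens, closes)
# The open-to-close span is 389 minutes; including both endpoints gives 390.
assert len(minutes) == 390
assert minutes[0] == np.datetime64('2019-01-02T14:31')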
234,625
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.get_calendar
def get_calendar(self, name):
    """
    Retrieves an instance of a TradingCalendar whose name is given.

    Parameters
    ----------
    name : str
        The name of the TradingCalendar to be retrieved.

    Returns
    -------
    calendar : calendars.TradingCalendar
        The desired calendar.
    """
    canonical_name = self.resolve_alias(name)

    try:
        return self._calendars[canonical_name]
    except KeyError:
        # We haven't loaded this calendar yet, so make a new one.
        pass

    try:
        factory = self._calendar_factories[canonical_name]
    except KeyError:
        # We don't have a factory registered for this name.  Barf.
        raise InvalidCalendarName(calendar_name=name)

    # Cache the calendar for future use.
    calendar = self._calendars[canonical_name] = factory()
    return calendar
python
def get_calendar(self, name):
    """
    Retrieves an instance of a TradingCalendar whose name is given.

    Parameters
    ----------
    name : str
        The name of the TradingCalendar to be retrieved.

    Returns
    -------
    calendar : calendars.TradingCalendar
        The desired calendar.
    """
    canonical_name = self.resolve_alias(name)

    try:
        return self._calendars[canonical_name]
    except KeyError:
        # We haven't loaded this calendar yet, so make a new one.
        pass

    try:
        factory = self._calendar_factories[canonical_name]
    except KeyError:
        # We don't have a factory registered for this name.  Barf.
        raise InvalidCalendarName(calendar_name=name)

    # Cache the calendar for future use.
    calendar = self._calendars[canonical_name] = factory()
    return calendar
[ "def", "get_calendar", "(", "self", ",", "name", ")", ":", "canonical_name", "=", "self", ".", "resolve_alias", "(", "name", ")", "try", ":", "return", "self", ".", "_calendars", "[", "canonical_name", "]", "except", "KeyError", ":", "# We haven't loaded this calendar yet, so make a new one.", "pass", "try", ":", "factory", "=", "self", ".", "_calendar_factories", "[", "canonical_name", "]", "except", "KeyError", ":", "# We don't have a factory registered for this name. Barf.", "raise", "InvalidCalendarName", "(", "calendar_name", "=", "name", ")", "# Cache the calendar for future use.", "calendar", "=", "self", ".", "_calendars", "[", "canonical_name", "]", "=", "factory", "(", ")", "return", "calendar" ]
Retrieves an instance of a TradingCalendar whose name is given.

Parameters
----------
name : str
    The name of the TradingCalendar to be retrieved.

Returns
-------
calendar : calendars.TradingCalendar
    The desired calendar.
[ "Retrieves", "an", "instance", "of", "an", "TradingCalendar", "whose", "name", "is", "given", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L118-L148
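get_calendar is a get-or-create cache in front of the registered factories. A stripped-down, standalone sketch of the same pattern (illustrative names, not the library's):

class LazyRegistry(object):
    """Cache-then-factory lookup, mirroring get_calendar above."""

    def __init__(self):
        self._factories = {}
        self._cache = {}

    def get(self, name):
        try:
            return self._cache[name]         # already instantiated
        except KeyError:
            pass
        try:
            factory = self._factories[name]  # lazily registered type
        except KeyError:
            raise KeyError('no factory registered for %r' % name)
        obj = self._cache[name] = factory()  # instantiate once, then cache
        return obj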
234,626
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.register_calendar
def register_calendar(self, name, calendar, force=False): """ Registers a calendar for retrieval by the get_calendar method. Parameters ---------- name: str The key with which to register this calendar. calendar: TradingCalendar The calendar to be registered for retrieval. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. Raises ------ CalendarNameCollision If a calendar is already registered with the given calendar's name. """ if force: self.deregister_calendar(name) if self.has_calendar(name): raise CalendarNameCollision(calendar_name=name) self._calendars[name] = calendar
python
def register_calendar(self, name, calendar, force=False): """ Registers a calendar for retrieval by the get_calendar method. Parameters ---------- name: str The key with which to register this calendar. calendar: TradingCalendar The calendar to be registered for retrieval. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. Raises ------ CalendarNameCollision If a calendar is already registered with the given calendar's name. """ if force: self.deregister_calendar(name) if self.has_calendar(name): raise CalendarNameCollision(calendar_name=name) self._calendars[name] = calendar
[ "def", "register_calendar", "(", "self", ",", "name", ",", "calendar", ",", "force", "=", "False", ")", ":", "if", "force", ":", "self", ".", "deregister_calendar", "(", "name", ")", "if", "self", ".", "has_calendar", "(", "name", ")", ":", "raise", "CalendarNameCollision", "(", "calendar_name", "=", "name", ")", "self", ".", "_calendars", "[", "name", "]", "=", "calendar" ]
Registers a calendar for retrieval by the get_calendar method. Parameters ---------- name: str The key with which to register this calendar. calendar: TradingCalendar The calendar to be registered for retrieval. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. Raises ------ CalendarNameCollision If a calendar is already registered with the given calendar's name.
[ "Registers", "a", "calendar", "for", "retrieval", "by", "the", "get_calendar", "method", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L160-L186
234,627
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.register_calendar_type
def register_calendar_type(self, name, calendar_type, force=False): """ Registers a calendar by type. This is useful for registering a new calendar to be lazily instantiated at some future point in time. Parameters ---------- name: str The key with which to register this calendar. calendar_type: type The type of the calendar to register. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. Raises ------ CalendarNameCollision If a calendar is already registered with the given calendar's name. """ if force: self.deregister_calendar(name) if self.has_calendar(name): raise CalendarNameCollision(calendar_name=name) self._calendar_factories[name] = calendar_type
python
def register_calendar_type(self, name, calendar_type, force=False): """ Registers a calendar by type. This is useful for registering a new calendar to be lazily instantiated at some future point in time. Parameters ---------- name: str The key with which to register this calendar. calendar_type: type The type of the calendar to register. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. Raises ------ CalendarNameCollision If a calendar is already registered with the given calendar's name. """ if force: self.deregister_calendar(name) if self.has_calendar(name): raise CalendarNameCollision(calendar_name=name) self._calendar_factories[name] = calendar_type
[ "def", "register_calendar_type", "(", "self", ",", "name", ",", "calendar_type", ",", "force", "=", "False", ")", ":", "if", "force", ":", "self", ".", "deregister_calendar", "(", "name", ")", "if", "self", ".", "has_calendar", "(", "name", ")", ":", "raise", "CalendarNameCollision", "(", "calendar_name", "=", "name", ")", "self", ".", "_calendar_factories", "[", "name", "]", "=", "calendar_type" ]
Registers a calendar by type. This is useful for registering a new calendar to be lazily instantiated at some future point in time. Parameters ---------- name: str The key with which to register this calendar. calendar_type: type The type of the calendar to register. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. Raises ------ CalendarNameCollision If a calendar is already registered with the given calendar's name.
[ "Registers", "a", "calendar", "by", "type", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L188-L217
234,628
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.register_calendar_alias
def register_calendar_alias(self, alias, real_name, force=False): """ Register an alias for a calendar. This is useful when multiple exchanges should share a calendar, or when there are multiple ways to refer to the same exchange. After calling ``register_alias('alias', 'real_name')``, subsequent calls to ``get_calendar('alias')`` will return the same result as ``get_calendar('real_name')``. Parameters ---------- alias : str The name to be used to refer to a calendar. real_name : str The canonical name of the registered calendar. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. """ if force: self.deregister_calendar(alias) if self.has_calendar(alias): raise CalendarNameCollision(calendar_name=alias) self._aliases[alias] = real_name # Ensure that the new alias doesn't create a cycle, and back it out if # we did. try: self.resolve_alias(alias) except CyclicCalendarAlias: del self._aliases[alias] raise
python
def register_calendar_alias(self, alias, real_name, force=False): """ Register an alias for a calendar. This is useful when multiple exchanges should share a calendar, or when there are multiple ways to refer to the same exchange. After calling ``register_alias('alias', 'real_name')``, subsequent calls to ``get_calendar('alias')`` will return the same result as ``get_calendar('real_name')``. Parameters ---------- alias : str The name to be used to refer to a calendar. real_name : str The canonical name of the registered calendar. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False. """ if force: self.deregister_calendar(alias) if self.has_calendar(alias): raise CalendarNameCollision(calendar_name=alias) self._aliases[alias] = real_name # Ensure that the new alias doesn't create a cycle, and back it out if # we did. try: self.resolve_alias(alias) except CyclicCalendarAlias: del self._aliases[alias] raise
[ "def", "register_calendar_alias", "(", "self", ",", "alias", ",", "real_name", ",", "force", "=", "False", ")", ":", "if", "force", ":", "self", ".", "deregister_calendar", "(", "alias", ")", "if", "self", ".", "has_calendar", "(", "alias", ")", ":", "raise", "CalendarNameCollision", "(", "calendar_name", "=", "alias", ")", "self", ".", "_aliases", "[", "alias", "]", "=", "real_name", "# Ensure that the new alias doesn't create a cycle, and back it out if", "# we did.", "try", ":", "self", ".", "resolve_alias", "(", "alias", ")", "except", "CyclicCalendarAlias", ":", "del", "self", ".", "_aliases", "[", "alias", "]", "raise" ]
Register an alias for a calendar. This is useful when multiple exchanges should share a calendar, or when there are multiple ways to refer to the same exchange. After calling ``register_alias('alias', 'real_name')``, subsequent calls to ``get_calendar('alias')`` will return the same result as ``get_calendar('real_name')``. Parameters ---------- alias : str The name to be used to refer to a calendar. real_name : str The canonical name of the registered calendar. force : bool, optional If True, old calendars will be overwritten on a name collision. If False, name collisions will raise an exception. Default is False.
[ "Register", "an", "alias", "for", "a", "calendar", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L219-L255
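A hedged usage sketch: the package also exposes module-level wrappers bound to a global dispatcher (the exact top-level exports are assumed here):

from trading_calendars import get_calendar, register_calendar_alias

# force=True deregisters any existing 'NYSE' entry before re-aliasing it;
# without it, a name collision raises CalendarNameCollision.
register_calendar_alias('NYSE', 'XNYS', force=True)
assert get_calendar('NYSE') is get_calendar('XNYS')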
234,629
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.resolve_alias
def resolve_alias(self, name): """ Resolve a calendar alias for retrieval. Parameters ---------- name : str The name of the requested calendar. Returns ------- canonical_name : str The real name of the calendar to create/return. """ seen = [] while name in self._aliases: seen.append(name) name = self._aliases[name] # This is O(N ** 2), but if there's an alias chain longer than 2, # something strange has happened. if name in seen: seen.append(name) raise CyclicCalendarAlias( cycle=" -> ".join(repr(k) for k in seen) ) return name
python
def resolve_alias(self, name): """ Resolve a calendar alias for retrieval. Parameters ---------- name : str The name of the requested calendar. Returns ------- canonical_name : str The real name of the calendar to create/return. """ seen = [] while name in self._aliases: seen.append(name) name = self._aliases[name] # This is O(N ** 2), but if there's an alias chain longer than 2, # something strange has happened. if name in seen: seen.append(name) raise CyclicCalendarAlias( cycle=" -> ".join(repr(k) for k in seen) ) return name
[ "def", "resolve_alias", "(", "self", ",", "name", ")", ":", "seen", "=", "[", "]", "while", "name", "in", "self", ".", "_aliases", ":", "seen", ".", "append", "(", "name", ")", "name", "=", "self", ".", "_aliases", "[", "name", "]", "# This is O(N ** 2), but if there's an alias chain longer than 2,", "# something strange has happened.", "if", "name", "in", "seen", ":", "seen", ".", "append", "(", "name", ")", "raise", "CyclicCalendarAlias", "(", "cycle", "=", "\" -> \"", ".", "join", "(", "repr", "(", "k", ")", "for", "k", "in", "seen", ")", ")", "return", "name" ]
Resolve a calendar alias for retrieval. Parameters ---------- name : str The name of the requested calendar. Returns ------- canonical_name : str The real name of the calendar to create/return.
[ "Resolve", "a", "calendar", "alias", "for", "retrieval", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L257-L285
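The alias-chain walk and cycle check in miniature (standalone sketch with hypothetical names):

def resolve(aliases, name):
    # Follow the chain, remembering every name we pass through.
    seen = []
    while name in aliases:
        seen.append(name)
        name = aliases[name]
        if name in seen:  # revisiting a name means the chain loops
            raise ValueError(' -> '.join(repr(k) for k in seen + [name]))
    return name

assert resolve({'NYSE': 'XNYS'}, 'NYSE') == 'XNYS'
# resolve({'A': 'B', 'B': 'A'}, 'A') raises: 'A' -> 'B' -> 'A'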
234,630
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.deregister_calendar
def deregister_calendar(self, name):
    """
    If a calendar is registered with the given name, it is de-registered.

    Parameters
    ----------
    name : str
        The name of the calendar to be deregistered.
    """
    self._calendars.pop(name, None)
    self._calendar_factories.pop(name, None)
    self._aliases.pop(name, None)
python
def deregister_calendar(self, name):
    """
    If a calendar is registered with the given name, it is de-registered.

    Parameters
    ----------
    name : str
        The name of the calendar to be deregistered.
    """
    self._calendars.pop(name, None)
    self._calendar_factories.pop(name, None)
    self._aliases.pop(name, None)
[ "def", "deregister_calendar", "(", "self", ",", "name", ")", ":", "self", ".", "_calendars", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "_calendar_factories", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "_aliases", ".", "pop", "(", "name", ",", "None", ")" ]
If a calendar is registered with the given name, it is de-registered.

Parameters
----------
name : str
    The name of the calendar to be deregistered.
[ "If", "a", "calendar", "is", "registered", "with", "the", "given", "name", "it", "is", "de", "-", "registered", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L287-L298
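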
234,631
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.clear_calendars
def clear_calendars(self):
    """
    Deregisters all currently registered calendars
    """
    self._calendars.clear()
    self._calendar_factories.clear()
    self._aliases.clear()
python
def clear_calendars(self):
    """
    Deregisters all currently registered calendars
    """
    self._calendars.clear()
    self._calendar_factories.clear()
    self._aliases.clear()
[ "def", "clear_calendars", "(", "self", ")", ":", "self", ".", "_calendars", ".", "clear", "(", ")", "self", ".", "_calendar_factories", ".", "clear", "(", ")", "self", ".", "_aliases", ".", "clear", "(", ")" ]
Deregisters all currently registered calendars
[ "Deregisters", "all", "current", "registered", "calendars" ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/calendar_utils.py#L300-L306
234,632
quantopian/trading_calendars
trading_calendars/trading_calendar.py
_overwrite_special_dates
def _overwrite_special_dates(midnight_utcs,
                             opens_or_closes,
                             special_opens_or_closes):
    """
    Overwrite dates in opens_or_closes with corresponding dates in
    special_opens_or_closes, using midnight_utcs for alignment.
    """
    # Short circuit when nothing to apply.
    if not len(special_opens_or_closes):
        return

    len_m, len_oc = len(midnight_utcs), len(opens_or_closes)
    if len_m != len_oc:
        raise ValueError(
            "Found misaligned dates while building calendar.\n"
            "Expected midnight_utcs to be the same length as opens_or_closes,\n"
            "but len(midnight_utcs)=%d, len(opens_or_closes)=%d" % (len_m, len_oc)
        )

    # Find the array indices corresponding to each special date.
    indexer = midnight_utcs.get_indexer(special_opens_or_closes.index)

    # -1 indicates that no corresponding entry was found. If any -1s are
    # present, then we have special dates that don't correspond to any
    # trading day.
    if -1 in indexer:
        bad_dates = list(special_opens_or_closes[indexer == -1])
        raise ValueError("Special dates %s are not trading days." % bad_dates)

    # NOTE: This is a slightly dirty hack. We're in-place overwriting the
    # internal data of an Index, which is conceptually immutable. Since we're
    # maintaining sorting, this should be ok, but this is a good place to
    # sanity check if things start going haywire with calendar computations.
    opens_or_closes.values[indexer] = special_opens_or_closes.values
python
def _overwrite_special_dates(midnight_utcs,
                             opens_or_closes,
                             special_opens_or_closes):
    """
    Overwrite dates in opens_or_closes with corresponding dates in
    special_opens_or_closes, using midnight_utcs for alignment.
    """
    # Short circuit when nothing to apply.
    if not len(special_opens_or_closes):
        return

    len_m, len_oc = len(midnight_utcs), len(opens_or_closes)
    if len_m != len_oc:
        raise ValueError(
            "Found misaligned dates while building calendar.\n"
            "Expected midnight_utcs to be the same length as opens_or_closes,\n"
            "but len(midnight_utcs)=%d, len(opens_or_closes)=%d" % (len_m, len_oc)
        )

    # Find the array indices corresponding to each special date.
    indexer = midnight_utcs.get_indexer(special_opens_or_closes.index)

    # -1 indicates that no corresponding entry was found. If any -1s are
    # present, then we have special dates that don't correspond to any
    # trading day.
    if -1 in indexer:
        bad_dates = list(special_opens_or_closes[indexer == -1])
        raise ValueError("Special dates %s are not trading days." % bad_dates)

    # NOTE: This is a slightly dirty hack. We're in-place overwriting the
    # internal data of an Index, which is conceptually immutable. Since we're
    # maintaining sorting, this should be ok, but this is a good place to
    # sanity check if things start going haywire with calendar computations.
    opens_or_closes.values[indexer] = special_opens_or_closes.values
[ "def", "_overwrite_special_dates", "(", "midnight_utcs", ",", "opens_or_closes", ",", "special_opens_or_closes", ")", ":", "# Short circuit when nothing to apply.", "if", "not", "len", "(", "special_opens_or_closes", ")", ":", "return", "len_m", ",", "len_oc", "=", "len", "(", "midnight_utcs", ")", ",", "len", "(", "opens_or_closes", ")", "if", "len_m", "!=", "len_oc", ":", "raise", "ValueError", "(", "\"Found misaligned dates while building calendar.\\n\"", "\"Expected midnight_utcs to be the same length as open_or_closes,\\n\"", "\"but len(midnight_utcs)=%d, len(open_or_closes)=%d\"", "%", "len_m", ",", "len_oc", ")", "# Find the array indices corresponding to each special date.", "indexer", "=", "midnight_utcs", ".", "get_indexer", "(", "special_opens_or_closes", ".", "index", ")", "# -1 indicates that no corresponding entry was found. If any -1s are", "# present, then we have special dates that doesn't correspond to any", "# trading day.", "if", "-", "1", "in", "indexer", ":", "bad_dates", "=", "list", "(", "special_opens_or_closes", "[", "indexer", "==", "-", "1", "]", ")", "raise", "ValueError", "(", "\"Special dates %s are not trading days.\"", "%", "bad_dates", ")", "# NOTE: This is a slightly dirty hack. We're in-place overwriting the", "# internal data of an Index, which is conceptually immutable. Since we're", "# maintaining sorting, this should be ok, but this is a good place to", "# sanity check if things start going haywire with calendar computations.", "opens_or_closes", ".", "values", "[", "indexer", "]", "=", "special_opens_or_closes", ".", "values" ]
Overwrite dates in opens_or_closes with corresponding dates in special_opens_or_closes, using midnight_utcs for alignment.
[ "Overwrite", "dates", "in", "open_or_closes", "with", "corresponding", "dates", "in", "special_opens_or_closes", "using", "midnight_utcs", "for", "alignment", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L1016-L1049
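The alignment step leans on DatetimeIndex.get_indexer returning -1 for labels with no match; isolated on toy data:

import pandas as pd

sessions = pd.DatetimeIndex(['2019-07-01', '2019-07-02', '2019-07-03'])
special = pd.DatetimeIndex(['2019-07-02', '2019-07-06'])
indexer = sessions.get_indexer(special)
print(indexer)  # [ 1 -1] -- the -1 flags a special date with no session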
234,633
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.is_open_on_minute
def is_open_on_minute(self, dt): """ Given a dt, return whether this exchange is open at the given dt. Parameters ---------- dt: pd.Timestamp The dt for which to check if this exchange is open. Returns ------- bool Whether the exchange is open on this dt. """ return is_open(self.market_opens_nanos, self.market_closes_nanos, dt.value)
python
def is_open_on_minute(self, dt): """ Given a dt, return whether this exchange is open at the given dt. Parameters ---------- dt: pd.Timestamp The dt for which to check if this exchange is open. Returns ------- bool Whether the exchange is open on this dt. """ return is_open(self.market_opens_nanos, self.market_closes_nanos, dt.value)
[ "def", "is_open_on_minute", "(", "self", ",", "dt", ")", ":", "return", "is_open", "(", "self", ".", "market_opens_nanos", ",", "self", ".", "market_closes_nanos", ",", "dt", ".", "value", ")" ]
Given a dt, return whether this exchange is open at the given dt. Parameters ---------- dt: pd.Timestamp The dt for which to check if this exchange is open. Returns ------- bool Whether the exchange is open on this dt.
[ "Given", "a", "dt", "return", "whether", "this", "exchange", "is", "open", "at", "the", "given", "dt", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L339-L354
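The is_open helper itself is defined in calendar_helpers and not shown in this record; a plausible equivalent using a binary search over the nanosecond arrays (an assumption, not the library's exact code):

import numpy as np

def is_open_sketch(opens_nanos, closes_nanos, dt_nanos):
    # Find the first session whose close is at or after dt; the market is
    # open iff that session's open is at or before dt.
    idx = np.searchsorted(closes_nanos, dt_nanos)
    return idx < len(opens_nanos) and opens_nanos[idx] <= dt_nanos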
234,634
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.next_open
def next_open(self, dt): """ Given a dt, returns the next open. If the given dt happens to be a session open, the next session's open will be returned. Parameters ---------- dt: pd.Timestamp The dt for which to get the next open. Returns ------- pd.Timestamp The UTC timestamp of the next open. """ idx = next_divider_idx(self.market_opens_nanos, dt.value) return pd.Timestamp(self.market_opens_nanos[idx], tz=UTC)
python
def next_open(self, dt): """ Given a dt, returns the next open. If the given dt happens to be a session open, the next session's open will be returned. Parameters ---------- dt: pd.Timestamp The dt for which to get the next open. Returns ------- pd.Timestamp The UTC timestamp of the next open. """ idx = next_divider_idx(self.market_opens_nanos, dt.value) return pd.Timestamp(self.market_opens_nanos[idx], tz=UTC)
[ "def", "next_open", "(", "self", ",", "dt", ")", ":", "idx", "=", "next_divider_idx", "(", "self", ".", "market_opens_nanos", ",", "dt", ".", "value", ")", "return", "pd", ".", "Timestamp", "(", "self", ".", "market_opens_nanos", "[", "idx", "]", ",", "tz", "=", "UTC", ")" ]
Given a dt, returns the next open. If the given dt happens to be a session open, the next session's open will be returned. Parameters ---------- dt: pd.Timestamp The dt for which to get the next open. Returns ------- pd.Timestamp The UTC timestamp of the next open.
[ "Given", "a", "dt", "returns", "the", "next", "open", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L356-L374
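Per the docstring, a dt equal to a session open must return the *next* session's open, which implies next_divider_idx finds the first value strictly greater than dt. A one-line sketch of that assumed behavior:

import numpy as np

def next_divider_idx_sketch(dividers, minute_val):
    # side='right' skips an exact match, so an open maps to the next open.
    return np.searchsorted(dividers, minute_val, side='right')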
234,635
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.next_close
def next_close(self, dt): """ Given a dt, returns the next close. Parameters ---------- dt: pd.Timestamp The dt for which to get the next close. Returns ------- pd.Timestamp The UTC timestamp of the next close. """ idx = next_divider_idx(self.market_closes_nanos, dt.value) return pd.Timestamp(self.market_closes_nanos[idx], tz=UTC)
python
def next_close(self, dt): """ Given a dt, returns the next close. Parameters ---------- dt: pd.Timestamp The dt for which to get the next close. Returns ------- pd.Timestamp The UTC timestamp of the next close. """ idx = next_divider_idx(self.market_closes_nanos, dt.value) return pd.Timestamp(self.market_closes_nanos[idx], tz=UTC)
[ "def", "next_close", "(", "self", ",", "dt", ")", ":", "idx", "=", "next_divider_idx", "(", "self", ".", "market_closes_nanos", ",", "dt", ".", "value", ")", "return", "pd", ".", "Timestamp", "(", "self", ".", "market_closes_nanos", "[", "idx", "]", ",", "tz", "=", "UTC", ")" ]
Given a dt, returns the next close. Parameters ---------- dt: pd.Timestamp The dt for which to get the next close. Returns ------- pd.Timestamp The UTC timestamp of the next close.
[ "Given", "a", "dt", "returns", "the", "next", "close", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L376-L391
234,636
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.previous_open
def previous_open(self, dt):
    """
    Given a dt, returns the previous open.

    Parameters
    ----------
    dt: pd.Timestamp
        The dt for which to get the previous open.

    Returns
    -------
    pd.Timestamp
        The UTC timestamp of the previous open.
    """
    idx = previous_divider_idx(self.market_opens_nanos, dt.value)
    return pd.Timestamp(self.market_opens_nanos[idx], tz=UTC)
python
def previous_open(self, dt):
    """
    Given a dt, returns the previous open.

    Parameters
    ----------
    dt: pd.Timestamp
        The dt for which to get the previous open.

    Returns
    -------
    pd.Timestamp
        The UTC timestamp of the previous open.
    """
    idx = previous_divider_idx(self.market_opens_nanos, dt.value)
    return pd.Timestamp(self.market_opens_nanos[idx], tz=UTC)
[ "def", "previous_open", "(", "self", ",", "dt", ")", ":", "idx", "=", "previous_divider_idx", "(", "self", ".", "market_opens_nanos", ",", "dt", ".", "value", ")", "return", "pd", ".", "Timestamp", "(", "self", ".", "market_opens_nanos", "[", "idx", "]", ",", "tz", "=", "UTC", ")" ]
Given a dt, returns the previous open.

Parameters
----------
dt: pd.Timestamp
    The dt for which to get the previous open.

Returns
-------
pd.Timestamp
    The UTC timestamp of the previous open.
[ "Given", "a", "dt", "returns", "the", "previous", "open", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L393-L408
234,637
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.previous_close
def previous_close(self, dt): """ Given a dt, returns the previous close. Parameters ---------- dt: pd.Timestamp The dt for which to get the previous close. Returns ------- pd.Timestamp The UTC timestamp of the previous close. """ idx = previous_divider_idx(self.market_closes_nanos, dt.value) return pd.Timestamp(self.market_closes_nanos[idx], tz=UTC)
python
def previous_close(self, dt): """ Given a dt, returns the previous close. Parameters ---------- dt: pd.Timestamp The dt for which to get the previous close. Returns ------- pd.Timestamp The UTC timestamp of the previous close. """ idx = previous_divider_idx(self.market_closes_nanos, dt.value) return pd.Timestamp(self.market_closes_nanos[idx], tz=UTC)
[ "def", "previous_close", "(", "self", ",", "dt", ")", ":", "idx", "=", "previous_divider_idx", "(", "self", ".", "market_closes_nanos", ",", "dt", ".", "value", ")", "return", "pd", ".", "Timestamp", "(", "self", ".", "market_closes_nanos", "[", "idx", "]", ",", "tz", "=", "UTC", ")" ]
Given a dt, returns the previous close. Parameters ---------- dt: pd.Timestamp The dt for which to get the previous close. Returns ------- pd.Timestamp The UTC timestamp of the previous close.
[ "Given", "a", "dt", "returns", "the", "previous", "close", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L410-L425
234,638
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.next_minute
def next_minute(self, dt): """ Given a dt, return the next exchange minute. If the given dt is not an exchange minute, returns the next exchange open. Parameters ---------- dt: pd.Timestamp The dt for which to get the next exchange minute. Returns ------- pd.Timestamp The next exchange minute. """ idx = next_divider_idx(self._trading_minutes_nanos, dt.value) return self.all_minutes[idx]
python
def next_minute(self, dt): """ Given a dt, return the next exchange minute. If the given dt is not an exchange minute, returns the next exchange open. Parameters ---------- dt: pd.Timestamp The dt for which to get the next exchange minute. Returns ------- pd.Timestamp The next exchange minute. """ idx = next_divider_idx(self._trading_minutes_nanos, dt.value) return self.all_minutes[idx]
[ "def", "next_minute", "(", "self", ",", "dt", ")", ":", "idx", "=", "next_divider_idx", "(", "self", ".", "_trading_minutes_nanos", ",", "dt", ".", "value", ")", "return", "self", ".", "all_minutes", "[", "idx", "]" ]
Given a dt, return the next exchange minute. If the given dt is not an exchange minute, returns the next exchange open. Parameters ---------- dt: pd.Timestamp The dt for which to get the next exchange minute. Returns ------- pd.Timestamp The next exchange minute.
[ "Given", "a", "dt", "return", "the", "next", "exchange", "minute", ".", "If", "the", "given", "dt", "is", "not", "an", "exchange", "minute", "returns", "the", "next", "exchange", "open", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L427-L443
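A usage sketch against a real calendar (assuming the package's top-level get_calendar and the XNYS calendar name):

import pandas as pd
from trading_calendars import get_calendar

cal = get_calendar('XNYS')
# 02:00 UTC falls outside trading hours, so the result is that session's
# first minute (9:31 US/Eastern under this library's convention).
print(cal.next_minute(pd.Timestamp('2019-07-02 02:00', tz='UTC')))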
234,639
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.previous_minute
def previous_minute(self, dt): """ Given a dt, return the previous exchange minute. Raises KeyError if the given timestamp is not an exchange minute. Parameters ---------- dt: pd.Timestamp The dt for which to get the previous exchange minute. Returns ------- pd.Timestamp The previous exchange minute. """ idx = previous_divider_idx(self._trading_minutes_nanos, dt.value) return self.all_minutes[idx]
python
def previous_minute(self, dt): """ Given a dt, return the previous exchange minute. Raises KeyError if the given timestamp is not an exchange minute. Parameters ---------- dt: pd.Timestamp The dt for which to get the previous exchange minute. Returns ------- pd.Timestamp The previous exchange minute. """ idx = previous_divider_idx(self._trading_minutes_nanos, dt.value) return self.all_minutes[idx]
[ "def", "previous_minute", "(", "self", ",", "dt", ")", ":", "idx", "=", "previous_divider_idx", "(", "self", ".", "_trading_minutes_nanos", ",", "dt", ".", "value", ")", "return", "self", ".", "all_minutes", "[", "idx", "]" ]
Given a dt, return the previous exchange minute. Raises KeyError if the given timestamp is not an exchange minute. Parameters ---------- dt: pd.Timestamp The dt for which to get the previous exchange minute. Returns ------- pd.Timestamp The previous exchange minute.
[ "Given", "a", "dt", "return", "the", "previous", "exchange", "minute", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L445-L463
234,640
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.next_session_label
def next_session_label(self, session_label): """ Given a session label, returns the label of the next session. Parameters ---------- session_label: pd.Timestamp A session whose next session is desired. Returns ------- pd.Timestamp The next session label (midnight UTC). Notes ----- Raises ValueError if the given session is the last session in this calendar. """ idx = self.schedule.index.get_loc(session_label) try: return self.schedule.index[idx + 1] except IndexError: if idx == len(self.schedule.index) - 1: raise ValueError("There is no next session as this is the end" " of the exchange calendar.") else: raise
python
def next_session_label(self, session_label): """ Given a session label, returns the label of the next session. Parameters ---------- session_label: pd.Timestamp A session whose next session is desired. Returns ------- pd.Timestamp The next session label (midnight UTC). Notes ----- Raises ValueError if the given session is the last session in this calendar. """ idx = self.schedule.index.get_loc(session_label) try: return self.schedule.index[idx + 1] except IndexError: if idx == len(self.schedule.index) - 1: raise ValueError("There is no next session as this is the end" " of the exchange calendar.") else: raise
[ "def", "next_session_label", "(", "self", ",", "session_label", ")", ":", "idx", "=", "self", ".", "schedule", ".", "index", ".", "get_loc", "(", "session_label", ")", "try", ":", "return", "self", ".", "schedule", ".", "index", "[", "idx", "+", "1", "]", "except", "IndexError", ":", "if", "idx", "==", "len", "(", "self", ".", "schedule", ".", "index", ")", "-", "1", ":", "raise", "ValueError", "(", "\"There is no next session as this is the end\"", "\" of the exchange calendar.\"", ")", "else", ":", "raise" ]
Given a session label, returns the label of the next session. Parameters ---------- session_label: pd.Timestamp A session whose next session is desired. Returns ------- pd.Timestamp The next session label (midnight UTC). Notes ----- Raises ValueError if the given session is the last session in this calendar.
[ "Given", "a", "session", "label", "returns", "the", "label", "of", "the", "next", "session", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L465-L492
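A usage sketch (same assumptions about the top-level API as above):

import pandas as pd
from trading_calendars import get_calendar

cal = get_calendar('XNYS')
# 2019-07-04 is Independence Day, so the session after July 3rd is July 5th.
print(cal.next_session_label(pd.Timestamp('2019-07-03', tz='UTC')))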
234,641
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.previous_session_label
def previous_session_label(self, session_label): """ Given a session label, returns the label of the previous session. Parameters ---------- session_label: pd.Timestamp A session whose previous session is desired. Returns ------- pd.Timestamp The previous session label (midnight UTC). Notes ----- Raises ValueError if the given session is the first session in this calendar. """ idx = self.schedule.index.get_loc(session_label) if idx == 0: raise ValueError("There is no previous session as this is the" " beginning of the exchange calendar.") return self.schedule.index[idx - 1]
python
def previous_session_label(self, session_label): """ Given a session label, returns the label of the previous session. Parameters ---------- session_label: pd.Timestamp A session whose previous session is desired. Returns ------- pd.Timestamp The previous session label (midnight UTC). Notes ----- Raises ValueError if the given session is the first session in this calendar. """ idx = self.schedule.index.get_loc(session_label) if idx == 0: raise ValueError("There is no previous session as this is the" " beginning of the exchange calendar.") return self.schedule.index[idx - 1]
[ "def", "previous_session_label", "(", "self", ",", "session_label", ")", ":", "idx", "=", "self", ".", "schedule", ".", "index", ".", "get_loc", "(", "session_label", ")", "if", "idx", "==", "0", ":", "raise", "ValueError", "(", "\"There is no previous session as this is the\"", "\" beginning of the exchange calendar.\"", ")", "return", "self", ".", "schedule", ".", "index", "[", "idx", "-", "1", "]" ]
Given a session label, returns the label of the previous session. Parameters ---------- session_label: pd.Timestamp A session whose previous session is desired. Returns ------- pd.Timestamp The previous session label (midnight UTC). Notes ----- Raises ValueError if the given session is the first session in this calendar.
[ "Given", "a", "session", "label", "returns", "the", "label", "of", "the", "previous", "session", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L494-L518
234,642
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.minutes_for_session
def minutes_for_session(self, session_label): """ Given a session label, return the minutes for that session. Parameters ---------- session_label: pd.Timestamp (midnight UTC) A session label whose session's minutes are desired. Returns ------- pd.DateTimeIndex All the minutes for the given session. """ return self.minutes_in_range( start_minute=self.schedule.at[session_label, 'market_open'], end_minute=self.schedule.at[session_label, 'market_close'], )
python
def minutes_for_session(self, session_label): """ Given a session label, return the minutes for that session. Parameters ---------- session_label: pd.Timestamp (midnight UTC) A session label whose session's minutes are desired. Returns ------- pd.DateTimeIndex All the minutes for the given session. """ return self.minutes_in_range( start_minute=self.schedule.at[session_label, 'market_open'], end_minute=self.schedule.at[session_label, 'market_close'], )
[ "def", "minutes_for_session", "(", "self", ",", "session_label", ")", ":", "return", "self", ".", "minutes_in_range", "(", "start_minute", "=", "self", ".", "schedule", ".", "at", "[", "session_label", ",", "'market_open'", "]", ",", "end_minute", "=", "self", ".", "schedule", ".", "at", "[", "session_label", ",", "'market_close'", "]", ",", ")" ]
Given a session label, return the minutes for that session. Parameters ---------- session_label: pd.Timestamp (midnight UTC) A session label whose session's minutes are desired. Returns ------- pd.DateTimeIndex All the minutes for the given session.
[ "Given", "a", "session", "label", "return", "the", "minutes", "for", "that", "session", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L520-L537
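A usage sketch (same assumptions as above):

import pandas as pd
from trading_calendars import get_calendar

cal = get_calendar('XNYS')
minutes = cal.minutes_for_session(pd.Timestamp('2019-01-07', tz='UTC'))
assert len(minutes) == 390  # a regular full-day NYSE session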
234,643
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.execution_minutes_for_session
def execution_minutes_for_session(self, session_label): """ Given a session label, return the execution minutes for that session. Parameters ---------- session_label: pd.Timestamp (midnight UTC) A session label whose session's minutes are desired. Returns ------- pd.DateTimeIndex All the execution minutes for the given session. """ return self.minutes_in_range( start_minute=self.execution_time_from_open( self.schedule.at[session_label, 'market_open'], ), end_minute=self.execution_time_from_close( self.schedule.at[session_label, 'market_close'], ), )
python
def execution_minutes_for_session(self, session_label): """ Given a session label, return the execution minutes for that session. Parameters ---------- session_label: pd.Timestamp (midnight UTC) A session label whose session's minutes are desired. Returns ------- pd.DateTimeIndex All the execution minutes for the given session. """ return self.minutes_in_range( start_minute=self.execution_time_from_open( self.schedule.at[session_label, 'market_open'], ), end_minute=self.execution_time_from_close( self.schedule.at[session_label, 'market_close'], ), )
[ "def", "execution_minutes_for_session", "(", "self", ",", "session_label", ")", ":", "return", "self", ".", "minutes_in_range", "(", "start_minute", "=", "self", ".", "execution_time_from_open", "(", "self", ".", "schedule", ".", "at", "[", "session_label", ",", "'market_open'", "]", ",", ")", ",", "end_minute", "=", "self", ".", "execution_time_from_close", "(", "self", ".", "schedule", ".", "at", "[", "session_label", ",", "'market_close'", "]", ",", ")", ",", ")" ]
Given a session label, return the execution minutes for that session. Parameters ---------- session_label: pd.Timestamp (midnight UTC) A session label whose session's minutes are desired. Returns ------- pd.DateTimeIndex All the execution minutes for the given session.
[ "Given", "a", "session", "label", "return", "the", "execution", "minutes", "for", "that", "session", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L539-L560
234,644
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.sessions_in_range
def sessions_in_range(self, start_session_label, end_session_label): """ Given start and end session labels, return all the sessions in that range, inclusive. Parameters ---------- start_session_label: pd.Timestamp (midnight UTC) The label representing the first session of the desired range. end_session_label: pd.Timestamp (midnight UTC) The label representing the last session of the desired range. Returns ------- pd.DatetimeIndex The desired sessions. """ return self.all_sessions[ self.all_sessions.slice_indexer( start_session_label, end_session_label ) ]
python
def sessions_in_range(self, start_session_label, end_session_label): """ Given start and end session labels, return all the sessions in that range, inclusive. Parameters ---------- start_session_label: pd.Timestamp (midnight UTC) The label representing the first session of the desired range. end_session_label: pd.Timestamp (midnight UTC) The label representing the last session of the desired range. Returns ------- pd.DatetimeIndex The desired sessions. """ return self.all_sessions[ self.all_sessions.slice_indexer( start_session_label, end_session_label ) ]
[ "def", "sessions_in_range", "(", "self", ",", "start_session_label", ",", "end_session_label", ")", ":", "return", "self", ".", "all_sessions", "[", "self", ".", "all_sessions", ".", "slice_indexer", "(", "start_session_label", ",", "end_session_label", ")", "]" ]
Given start and end session labels, return all the sessions in that range, inclusive. Parameters ---------- start_session_label: pd.Timestamp (midnight UTC) The label representing the first session of the desired range. end_session_label: pd.Timestamp (midnight UTC) The label representing the last session of the desired range. Returns ------- pd.DatetimeIndex The desired sessions.
[ "Given", "start", "and", "end", "session", "labels", "return", "all", "the", "sessions", "in", "that", "range", "inclusive", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L592-L615
234,645
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.minutes_in_range
def minutes_in_range(self, start_minute, end_minute): """ Given start and end minutes, return all the calendar minutes in that range, inclusive. Given minutes don't need to be calendar minutes. Parameters ---------- start_minute: pd.Timestamp The minute representing the start of the desired range. end_minute: pd.Timestamp The minute representing the end of the desired range. Returns ------- pd.DatetimeIndex The minutes in the desired range. """ start_idx = searchsorted(self._trading_minutes_nanos, start_minute.value) end_idx = searchsorted(self._trading_minutes_nanos, end_minute.value) if end_minute.value == self._trading_minutes_nanos[end_idx]: # if the end minute is a market minute, increase by 1 end_idx += 1 return self.all_minutes[start_idx:end_idx]
python
def minutes_in_range(self, start_minute, end_minute): """ Given start and end minutes, return all the calendar minutes in that range, inclusive. Given minutes don't need to be calendar minutes. Parameters ---------- start_minute: pd.Timestamp The minute representing the start of the desired range. end_minute: pd.Timestamp The minute representing the end of the desired range. Returns ------- pd.DatetimeIndex The minutes in the desired range. """ start_idx = searchsorted(self._trading_minutes_nanos, start_minute.value) end_idx = searchsorted(self._trading_minutes_nanos, end_minute.value) if end_minute.value == self._trading_minutes_nanos[end_idx]: # if the end minute is a market minute, increase by 1 end_idx += 1 return self.all_minutes[start_idx:end_idx]
[ "def", "minutes_in_range", "(", "self", ",", "start_minute", ",", "end_minute", ")", ":", "start_idx", "=", "searchsorted", "(", "self", ".", "_trading_minutes_nanos", ",", "start_minute", ".", "value", ")", "end_idx", "=", "searchsorted", "(", "self", ".", "_trading_minutes_nanos", ",", "end_minute", ".", "value", ")", "if", "end_minute", ".", "value", "==", "self", ".", "_trading_minutes_nanos", "[", "end_idx", "]", ":", "# if the end minute is a market minute, increase by 1", "end_idx", "+=", "1", "return", "self", ".", "all_minutes", "[", "start_idx", ":", "end_idx", "]" ]
Given start and end minutes, return all the calendar minutes in that range, inclusive. Given minutes don't need to be calendar minutes. Parameters ---------- start_minute: pd.Timestamp The minute representing the start of the desired range. end_minute: pd.Timestamp The minute representing the end of the desired range. Returns ------- pd.DatetimeIndex The minutes in the desired range.
[ "Given", "start", "and", "end", "minutes", "return", "all", "the", "calendar", "minutes", "in", "that", "range", "inclusive", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L681-L711
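The end-inclusive slice trick, isolated on a toy nanosecond grid:

import numpy as np

nanos = np.array([0, 60, 120, 180])  # stand-in for _trading_minutes_nanos
start, end = 60, 120
i = np.searchsorted(nanos, start)
j = np.searchsorted(nanos, end)
if nanos[j] == end:
    j += 1  # bump past an exact end minute so the slice includes it
print(nanos[i:j])  # [ 60 120]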
234,646
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.minutes_for_sessions_in_range
def minutes_for_sessions_in_range(self, start_session_label, end_session_label): """ Returns all the minutes for all the sessions from the given start session label to the given end session label, inclusive. Parameters ---------- start_session_label: pd.Timestamp The label of the first session in the range. end_session_label: pd.Timestamp The label of the last session in the range. Returns ------- pd.DatetimeIndex The minutes in the desired range. """ first_minute, _ = self.open_and_close_for_session(start_session_label) _, last_minute = self.open_and_close_for_session(end_session_label) return self.minutes_in_range(first_minute, last_minute)
python
def minutes_for_sessions_in_range(self, start_session_label, end_session_label): """ Returns all the minutes for all the sessions from the given start session label to the given end session label, inclusive. Parameters ---------- start_session_label: pd.Timestamp The label of the first session in the range. end_session_label: pd.Timestamp The label of the last session in the range. Returns ------- pd.DatetimeIndex The minutes in the desired range. """ first_minute, _ = self.open_and_close_for_session(start_session_label) _, last_minute = self.open_and_close_for_session(end_session_label) return self.minutes_in_range(first_minute, last_minute)
[ "def", "minutes_for_sessions_in_range", "(", "self", ",", "start_session_label", ",", "end_session_label", ")", ":", "first_minute", ",", "_", "=", "self", ".", "open_and_close_for_session", "(", "start_session_label", ")", "_", ",", "last_minute", "=", "self", ".", "open_and_close_for_session", "(", "end_session_label", ")", "return", "self", ".", "minutes_in_range", "(", "first_minute", ",", "last_minute", ")" ]
Returns all the minutes for all the sessions from the given start session label to the given end session label, inclusive. Parameters ---------- start_session_label: pd.Timestamp The label of the first session in the range. end_session_label: pd.Timestamp The label of the last session in the range. Returns ------- pd.DatetimeIndex The minutes in the desired range.
[ "Returns", "all", "the", "minutes", "for", "all", "the", "sessions", "from", "the", "given", "start", "session", "label", "to", "the", "given", "end", "session", "label", "inclusive", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L713-L737
234,647
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.open_and_close_for_session
def open_and_close_for_session(self, session_label): """ Returns a tuple of timestamps of the open and close of the session represented by the given label. Parameters ---------- session_label: pd.Timestamp The session whose open and close are desired. Returns ------- (Timestamp, Timestamp) The open and close for the given session. """ sched = self.schedule # `market_open` and `market_close` should be timezone aware, but pandas # 0.16.1 does not appear to support this: # http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#datetime-with-tz # noqa return ( sched.at[session_label, 'market_open'].tz_localize(UTC), sched.at[session_label, 'market_close'].tz_localize(UTC), )
python
def open_and_close_for_session(self, session_label): """ Returns a tuple of timestamps of the open and close of the session represented by the given label. Parameters ---------- session_label: pd.Timestamp The session whose open and close are desired. Returns ------- (Timestamp, Timestamp) The open and close for the given session. """ sched = self.schedule # `market_open` and `market_close` should be timezone aware, but pandas # 0.16.1 does not appear to support this: # http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#datetime-with-tz # noqa return ( sched.at[session_label, 'market_open'].tz_localize(UTC), sched.at[session_label, 'market_close'].tz_localize(UTC), )
[ "def", "open_and_close_for_session", "(", "self", ",", "session_label", ")", ":", "sched", "=", "self", ".", "schedule", "# `market_open` and `market_close` should be timezone aware, but pandas", "# 0.16.1 does not appear to support this:", "# http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#datetime-with-tz # noqa", "return", "(", "sched", ".", "at", "[", "session_label", ",", "'market_open'", "]", ".", "tz_localize", "(", "UTC", ")", ",", "sched", ".", "at", "[", "session_label", ",", "'market_close'", "]", ".", "tz_localize", "(", "UTC", ")", ",", ")" ]
Returns a tuple of timestamps of the open and close of the session represented by the given label. Parameters ---------- session_label: pd.Timestamp The session whose open and close are desired. Returns ------- (Timestamp, Timestamp) The open and close for the given session.
[ "Returns", "a", "tuple", "of", "timestamps", "of", "the", "open", "and", "close", "of", "the", "session", "represented", "by", "the", "given", "label", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L739-L762
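The comment in the body explains why tz_localize(UTC) is applied on read: the schedule stores naive UTC timestamps. A toy sketch of that lookup pattern with a hand-built schedule-like DataFrame (all values illustrative):

import pandas as pd
from pytz import UTC

schedule = pd.DataFrame(
    {'market_open': [pd.Timestamp('2019-01-02 14:31')],
     'market_close': [pd.Timestamp('2019-01-02 21:00')]},
    index=[pd.Timestamp('2019-01-02')],  # the session label
)
label = pd.Timestamp('2019-01-02')
open_, close = (
    schedule.at[label, 'market_open'].tz_localize(UTC),
    schedule.at[label, 'market_close'].tz_localize(UTC),
)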
234,648
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.all_minutes
def all_minutes(self): """ Returns a DatetimeIndex representing all the minutes in this calendar. """ opens_in_ns = self._opens.values.astype( 'datetime64[ns]', ).view('int64') closes_in_ns = self._closes.values.astype( 'datetime64[ns]', ).view('int64') return DatetimeIndex( compute_all_minutes(opens_in_ns, closes_in_ns), tz=UTC, )
python
def all_minutes(self): """ Returns a DatetimeIndex representing all the minutes in this calendar. """ opens_in_ns = self._opens.values.astype( 'datetime64[ns]', ).view('int64') closes_in_ns = self._closes.values.astype( 'datetime64[ns]', ).view('int64') return DatetimeIndex( compute_all_minutes(opens_in_ns, closes_in_ns), tz=UTC, )
[ "def", "all_minutes", "(", "self", ")", ":", "opens_in_ns", "=", "self", ".", "_opens", ".", "values", ".", "astype", "(", "'datetime64[ns]'", ",", ")", ".", "view", "(", "'int64'", ")", "closes_in_ns", "=", "self", ".", "_closes", ".", "values", ".", "astype", "(", "'datetime64[ns]'", ",", ")", ".", "view", "(", "'int64'", ")", "return", "DatetimeIndex", "(", "compute_all_minutes", "(", "opens_in_ns", ",", "closes_in_ns", ")", ",", "tz", "=", "UTC", ",", ")" ]
Returns a DatetimeIndex representing all the minutes in this calendar.
[ "Returns", "a", "DatetimeIndex", "representing", "all", "the", "minutes", "in", "this", "calendar", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L807-L822
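compute_all_minutes is a helper defined elsewhere in the package; a pure-NumPy sketch of the idea it implements, assuming every close falls a whole number of minutes after its open:

import numpy as np

NANOS_PER_MINUTE = 60 * 1_000_000_000

def all_minutes_sketch(opens_ns, closes_ns):
    # For each (open, close) pair of int64 nanosecond timestamps,
    # emit every minute from open through close, inclusive.
    return np.concatenate([
        np.arange(o, c + NANOS_PER_MINUTE, NANOS_PER_MINUTE)
        for o, c in zip(opens_ns, closes_ns)
    ])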
234,649
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.minute_to_session_label
def minute_to_session_label(self, dt, direction="next"): """ Given a minute, get the label of its containing session. Parameters ---------- dt : pd.Timestamp or nanosecond offset The dt for which to get the containing session. direction: str "next" (default) means that if the given dt is not part of a session, return the label of the next session. "previous" means that if the given dt is not part of a session, return the label of the previous session. "none" means that a KeyError will be raised if the given dt is not part of a session. Returns ------- pd.Timestamp (midnight UTC) The label of the containing session. """ if direction == "next": try: return self._minute_to_session_label_cache[dt] except KeyError: pass idx = searchsorted(self.market_closes_nanos, dt) current_or_next_session = self.schedule.index[idx] self._minute_to_session_label_cache[dt] = current_or_next_session if direction == "next": return current_or_next_session elif direction == "previous": if not is_open(self.market_opens_nanos, self.market_closes_nanos, dt): # if the exchange is closed, use the previous session return self.schedule.index[idx - 1] elif direction == "none": if not is_open(self.market_opens_nanos, self.market_closes_nanos, dt): # if the exchange is closed, blow up raise ValueError("The given dt is not an exchange minute!") else: # invalid direction raise ValueError("Invalid direction parameter: " "{0}".format(direction)) return current_or_next_session
python
def minute_to_session_label(self, dt, direction="next"): """ Given a minute, get the label of its containing session. Parameters ---------- dt : pd.Timestamp or nanosecond offset The dt for which to get the containing session. direction: str "next" (default) means that if the given dt is not part of a session, return the label of the next session. "previous" means that if the given dt is not part of a session, return the label of the previous session. "none" means that a KeyError will be raised if the given dt is not part of a session. Returns ------- pd.Timestamp (midnight UTC) The label of the containing session. """ if direction == "next": try: return self._minute_to_session_label_cache[dt] except KeyError: pass idx = searchsorted(self.market_closes_nanos, dt) current_or_next_session = self.schedule.index[idx] self._minute_to_session_label_cache[dt] = current_or_next_session if direction == "next": return current_or_next_session elif direction == "previous": if not is_open(self.market_opens_nanos, self.market_closes_nanos, dt): # if the exchange is closed, use the previous session return self.schedule.index[idx - 1] elif direction == "none": if not is_open(self.market_opens_nanos, self.market_closes_nanos, dt): # if the exchange is closed, blow up raise ValueError("The given dt is not an exchange minute!") else: # invalid direction raise ValueError("Invalid direction parameter: " "{0}".format(direction)) return current_or_next_session
[ "def", "minute_to_session_label", "(", "self", ",", "dt", ",", "direction", "=", "\"next\"", ")", ":", "if", "direction", "==", "\"next\"", ":", "try", ":", "return", "self", ".", "_minute_to_session_label_cache", "[", "dt", "]", "except", "KeyError", ":", "pass", "idx", "=", "searchsorted", "(", "self", ".", "market_closes_nanos", ",", "dt", ")", "current_or_next_session", "=", "self", ".", "schedule", ".", "index", "[", "idx", "]", "self", ".", "_minute_to_session_label_cache", "[", "dt", "]", "=", "current_or_next_session", "if", "direction", "==", "\"next\"", ":", "return", "current_or_next_session", "elif", "direction", "==", "\"previous\"", ":", "if", "not", "is_open", "(", "self", ".", "market_opens_nanos", ",", "self", ".", "market_closes_nanos", ",", "dt", ")", ":", "# if the exchange is closed, use the previous session", "return", "self", ".", "schedule", ".", "index", "[", "idx", "-", "1", "]", "elif", "direction", "==", "\"none\"", ":", "if", "not", "is_open", "(", "self", ".", "market_opens_nanos", ",", "self", ".", "market_closes_nanos", ",", "dt", ")", ":", "# if the exchange is closed, blow up", "raise", "ValueError", "(", "\"The given dt is not an exchange minute!\"", ")", "else", ":", "# invalid direction", "raise", "ValueError", "(", "\"Invalid direction parameter: \"", "\"{0}\"", ".", "format", "(", "direction", ")", ")", "return", "current_or_next_session" ]
Given a minute, get the label of its containing session. Parameters ---------- dt : pd.Timestamp or nanosecond offset The dt for which to get the containing session. direction: str "next" (default) means that if the given dt is not part of a session, return the label of the next session. "previous" means that if the given dt is not part of a session, return the label of the previous session. "none" means that a ValueError will be raised if the given dt is not part of a session. Returns ------- pd.Timestamp (midnight UTC) The label of the containing session.
[ "Given", "a", "minute", "get", "the", "label", "of", "its", "containing", "session", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L825-L876
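The "next" lookup works because market_closes_nanos is sorted: searchsorted finds the first session whose close is at or after the given minute. A self-contained sketch with made-up nanosecond values:

import numpy as np

market_closes_nanos = np.array([100, 200, 300])  # toy sorted close times
session_labels = ['s0', 's1', 's2']

dt = 150  # a minute after the first close and before the second
idx = market_closes_nanos.searchsorted(dt)
print(session_labels[idx])  # -> 's1', the current-or-next session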
234,650
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar.minute_index_to_session_labels
def minute_index_to_session_labels(self, index): """ Given a sorted DatetimeIndex of market minutes, return a DatetimeIndex of the corresponding session labels. Parameters ---------- index: pd.DatetimeIndex or pd.Series The ordered list of market minutes we want session labels for. Returns ------- pd.DatetimeIndex (UTC) The list of session labels corresponding to the given minutes. """ if not index.is_monotonic_increasing: raise ValueError( "Non-ordered index passed to minute_index_to_session_labels." ) # Find the indices of the previous open and the next close for each # minute. prev_opens = ( self._opens.values.searchsorted(index.values, side='right') - 1 ) next_closes = ( self._closes.values.searchsorted(index.values, side='left') ) # If they don't match, the minute is outside the trading day. Barf. mismatches = (prev_opens != next_closes) if mismatches.any(): # Show the first bad minute in the error message. bad_ix = np.flatnonzero(mismatches)[0] example = index[bad_ix] prev_day = prev_opens[bad_ix] prev_open, prev_close = self.schedule.iloc[prev_day] next_open, next_close = self.schedule.iloc[prev_day + 1] raise ValueError( "{num} non-market minutes in minute_index_to_session_labels:\n" "First Bad Minute: {first_bad}\n" "Previous Session: {prev_open} -> {prev_close}\n" "Next Session: {next_open} -> {next_close}" .format( num=mismatches.sum(), first_bad=example, prev_open=prev_open, prev_close=prev_close, next_open=next_open, next_close=next_close) ) return self.schedule.index[prev_opens]
python
def minute_index_to_session_labels(self, index): """ Given a sorted DatetimeIndex of market minutes, return a DatetimeIndex of the corresponding session labels. Parameters ---------- index: pd.DatetimeIndex or pd.Series The ordered list of market minutes we want session labels for. Returns ------- pd.DatetimeIndex (UTC) The list of session labels corresponding to the given minutes. """ if not index.is_monotonic_increasing: raise ValueError( "Non-ordered index passed to minute_index_to_session_labels." ) # Find the indices of the previous open and the next close for each # minute. prev_opens = ( self._opens.values.searchsorted(index.values, side='right') - 1 ) next_closes = ( self._closes.values.searchsorted(index.values, side='left') ) # If they don't match, the minute is outside the trading day. Barf. mismatches = (prev_opens != next_closes) if mismatches.any(): # Show the first bad minute in the error message. bad_ix = np.flatnonzero(mismatches)[0] example = index[bad_ix] prev_day = prev_opens[bad_ix] prev_open, prev_close = self.schedule.iloc[prev_day] next_open, next_close = self.schedule.iloc[prev_day + 1] raise ValueError( "{num} non-market minutes in minute_index_to_session_labels:\n" "First Bad Minute: {first_bad}\n" "Previous Session: {prev_open} -> {prev_close}\n" "Next Session: {next_open} -> {next_close}" .format( num=mismatches.sum(), first_bad=example, prev_open=prev_open, prev_close=prev_close, next_open=next_open, next_close=next_close) ) return self.schedule.index[prev_opens]
[ "def", "minute_index_to_session_labels", "(", "self", ",", "index", ")", ":", "if", "not", "index", ".", "is_monotonic_increasing", ":", "raise", "ValueError", "(", "\"Non-ordered index passed to minute_index_to_session_labels.\"", ")", "# Find the indices of the previous open and the next close for each", "# minute.", "prev_opens", "=", "(", "self", ".", "_opens", ".", "values", ".", "searchsorted", "(", "index", ".", "values", ",", "side", "=", "'right'", ")", "-", "1", ")", "next_closes", "=", "(", "self", ".", "_closes", ".", "values", ".", "searchsorted", "(", "index", ".", "values", ",", "side", "=", "'left'", ")", ")", "# If they don't match, the minute is outside the trading day. Barf.", "mismatches", "=", "(", "prev_opens", "!=", "next_closes", ")", "if", "mismatches", ".", "any", "(", ")", ":", "# Show the first bad minute in the error message.", "bad_ix", "=", "np", ".", "flatnonzero", "(", "mismatches", ")", "[", "0", "]", "example", "=", "index", "[", "bad_ix", "]", "prev_day", "=", "prev_opens", "[", "bad_ix", "]", "prev_open", ",", "prev_close", "=", "self", ".", "schedule", ".", "iloc", "[", "prev_day", "]", "next_open", ",", "next_close", "=", "self", ".", "schedule", ".", "iloc", "[", "prev_day", "+", "1", "]", "raise", "ValueError", "(", "\"{num} non-market minutes in minute_index_to_session_labels:\\n\"", "\"First Bad Minute: {first_bad}\\n\"", "\"Previous Session: {prev_open} -> {prev_close}\\n\"", "\"Next Session: {next_open} -> {next_close}\"", ".", "format", "(", "num", "=", "mismatches", ".", "sum", "(", ")", ",", "first_bad", "=", "example", ",", "prev_open", "=", "prev_open", ",", "prev_close", "=", "prev_close", ",", "next_open", "=", "next_open", ",", "next_close", "=", "next_close", ")", ")", "return", "self", ".", "schedule", ".", "index", "[", "prev_opens", "]" ]
Given a sorted DatetimeIndex of market minutes, return a DatetimeIndex of the corresponding session labels. Parameters ---------- index: pd.DatetimeIndex or pd.Series The ordered list of market minutes we want session labels for. Returns ------- pd.DatetimeIndex (UTC) The list of session labels corresponding to the given minutes.
[ "Given", "a", "sorted", "DatetimeIndex", "of", "market", "minutes", "return", "a", "DatetimeIndex", "of", "the", "corresponding", "session", "labels", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L878-L930
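The consistency check above rests on a neat invariant: for a minute inside a session, the index of the last open at or before it equals the index of the first close at or after it, and the two diverge exactly when the minute falls between sessions. A toy demonstration:

import numpy as np

opens = np.array([10, 30, 50])    # toy session opens, sorted
closes = np.array([20, 40, 60])   # toy session closes, sorted
minutes = np.array([12, 35, 55])  # all inside some session

prev_opens = opens.searchsorted(minutes, side='right') - 1
next_closes = closes.searchsorted(minutes, side='left')
assert (prev_opens == next_closes).all()
print(prev_opens)  # -> [0 1 2], the session index of each minute

# A minute between sessions (e.g. 25) makes the two indices disagree:
assert opens.searchsorted(25, side='right') - 1 != closes.searchsorted(25, side='left')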
234,651
quantopian/trading_calendars
trading_calendars/trading_calendar.py
TradingCalendar._special_dates
def _special_dates(self, calendars, ad_hoc_dates, start_date, end_date): """ Compute a Series of times associated with special dates. Parameters ---------- holiday_calendars : list[(datetime.time, HolidayCalendar)] Pairs of time and calendar describing when that time occurs. These are used to describe regularly-scheduled late opens or early closes. ad_hoc_dates : list[(datetime.time, list[pd.Timestamp])] Pairs of time and list of dates associated with the given times. These are used to describe late opens or early closes that occurred for unscheduled or otherwise irregular reasons. start_date : pd.Timestamp Start of the range for which we should calculate special dates. end_date : pd.Timestamp End of the range for which we should calculate special dates. Returns ------- special_dates : pd.Series Series mapping trading sessions with special opens/closes to the special open/close for that session. """ # List of Series for regularly-scheduled times. regular = [ scheduled_special_times( calendar, start_date, end_date, time_, self.tz, ) for time_, calendar in calendars ] # List of Series for ad-hoc times. ad_hoc = [ pd.Series( index=pd.to_datetime(datetimes, utc=True), data=days_at_time(datetimes, time_, self.tz), ) for time_, datetimes in ad_hoc_dates ] merged = regular + ad_hoc if not merged: # Concat barfs if the input has length 0. return pd.Series([]) result = pd.concat(merged).sort_index() return result.loc[(result >= start_date) & (result <= end_date)]
python
def _special_dates(self, calendars, ad_hoc_dates, start_date, end_date): """ Compute a Series of times associated with special dates. Parameters ---------- holiday_calendars : list[(datetime.time, HolidayCalendar)] Pairs of time and calendar describing when that time occurs. These are used to describe regularly-scheduled late opens or early closes. ad_hoc_dates : list[(datetime.time, list[pd.Timestamp])] Pairs of time and list of dates associated with the given times. These are used to describe late opens or early closes that occurred for unscheduled or otherwise irregular reasons. start_date : pd.Timestamp Start of the range for which we should calculate special dates. end_date : pd.Timestamp End of the range for which we should calculate special dates. Returns ------- special_dates : pd.Series Series mapping trading sessions with special opens/closes to the special open/close for that session. """ # List of Series for regularly-scheduled times. regular = [ scheduled_special_times( calendar, start_date, end_date, time_, self.tz, ) for time_, calendar in calendars ] # List of Series for ad-hoc times. ad_hoc = [ pd.Series( index=pd.to_datetime(datetimes, utc=True), data=days_at_time(datetimes, time_, self.tz), ) for time_, datetimes in ad_hoc_dates ] merged = regular + ad_hoc if not merged: # Concat barfs if the input has length 0. return pd.Series([]) result = pd.concat(merged).sort_index() return result.loc[(result >= start_date) & (result <= end_date)]
[ "def", "_special_dates", "(", "self", ",", "calendars", ",", "ad_hoc_dates", ",", "start_date", ",", "end_date", ")", ":", "# List of Series for regularly-scheduled times.", "regular", "=", "[", "scheduled_special_times", "(", "calendar", ",", "start_date", ",", "end_date", ",", "time_", ",", "self", ".", "tz", ",", ")", "for", "time_", ",", "calendar", "in", "calendars", "]", "# List of Series for ad-hoc times.", "ad_hoc", "=", "[", "pd", ".", "Series", "(", "index", "=", "pd", ".", "to_datetime", "(", "datetimes", ",", "utc", "=", "True", ")", ",", "data", "=", "days_at_time", "(", "datetimes", ",", "time_", ",", "self", ".", "tz", ")", ",", ")", "for", "time_", ",", "datetimes", "in", "ad_hoc_dates", "]", "merged", "=", "regular", "+", "ad_hoc", "if", "not", "merged", ":", "# Concat barfs if the input has length 0.", "return", "pd", ".", "Series", "(", "[", "]", ")", "result", "=", "pd", ".", "concat", "(", "merged", ")", ".", "sort_index", "(", ")", "return", "result", ".", "loc", "[", "(", "result", ">=", "start_date", ")", "&", "(", "result", "<=", "end_date", ")", "]" ]
Compute a Series of times associated with special dates. Parameters ---------- calendars : list[(datetime.time, HolidayCalendar)] Pairs of time and calendar describing when that time occurs. These are used to describe regularly-scheduled late opens or early closes. ad_hoc_dates : list[(datetime.time, list[pd.Timestamp])] Pairs of time and list of dates associated with the given times. These are used to describe late opens or early closes that occurred for unscheduled or otherwise irregular reasons. start_date : pd.Timestamp Start of the range for which we should calculate special dates. end_date : pd.Timestamp End of the range for which we should calculate special dates. Returns ------- special_dates : pd.Series Series mapping trading sessions with special opens/closes to the special open/close for that session.
[ "Compute", "a", "Series", "of", "times", "associated", "with", "special", "dates", "." ]
951711c82c8a2875c09e96e2979faaf8734fb4df
https://github.com/quantopian/trading_calendars/blob/951711c82c8a2875c09e96e2979faaf8734fb4df/trading_calendars/trading_calendar.py#L932-L984
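The merge step is ordinary pandas: concatenate the regular and ad-hoc Series, sort, then mask. A standalone sketch with illustrative dates (note the final mask compares the values, i.e. the special times, against the range, just as the method above does):

import pandas as pd

regular = pd.Series({pd.Timestamp('2019-07-03'): pd.Timestamp('2019-07-03 17:00')})
ad_hoc = pd.Series({pd.Timestamp('2019-12-24'): pd.Timestamp('2019-12-24 18:00')})

merged = pd.concat([regular, ad_hoc]).sort_index()
start, end = pd.Timestamp('2019-01-01'), pd.Timestamp('2020-01-01')
special = merged.loc[(merged >= start) & (merged <= end)]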
234,652
datahq/dataflows
setup.py
read
def read(*paths): """Read a text file.""" basedir = os.path.dirname(__file__) fullpath = os.path.join(basedir, *paths) contents = io.open(fullpath, encoding='utf-8').read().strip() return contents
python
def read(*paths): """Read a text file.""" basedir = os.path.dirname(__file__) fullpath = os.path.join(basedir, *paths) contents = io.open(fullpath, encoding='utf-8').read().strip() return contents
[ "def", "read", "(", "*", "paths", ")", ":", "basedir", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "fullpath", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "*", "paths", ")", "contents", "=", "io", ".", "open", "(", "fullpath", ",", "encoding", "=", "'utf-8'", ")", ".", "read", "(", ")", ".", "strip", "(", ")", "return", "contents" ]
Read a text file.
[ "Read", "a", "text", "file", "." ]
2c5e5e01e09c8b44e0ff36d85b3f2f4dcf4e8465
https://github.com/datahq/dataflows/blob/2c5e5e01e09c8b44e0ff36d85b3f2f4dcf4e8465/setup.py#L12-L17
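Typical use of this helper inside the same setup.py, sketched with hypothetical file names:

from setuptools import setup

setup(
    name='dataflows',
    long_description=read('README.md'),  # 'README.md' is illustrative
    long_description_content_type='text/markdown',
)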
234,653
ipfs/py-ipfs-api
ipfsapi/encoding.py
Encoding.parse
def parse(self, raw): """Returns a Python object decoded from the bytes of this encoding. Raises ------ ~ipfsapi.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- object """ results = list(self.parse_partial(raw)) results.extend(self.parse_finalize()) return results[0] if len(results) == 1 else results
python
def parse(self, raw): """Returns a Python object decoded from the bytes of this encoding. Raises ------ ~ipfsapi.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- object """ results = list(self.parse_partial(raw)) results.extend(self.parse_finalize()) return results[0] if len(results) == 1 else results
[ "def", "parse", "(", "self", ",", "raw", ")", ":", "results", "=", "list", "(", "self", ".", "parse_partial", "(", "raw", ")", ")", "results", ".", "extend", "(", "self", ".", "parse_finalize", "(", ")", ")", "return", "results", "[", "0", "]", "if", "len", "(", "results", ")", "==", "1", "else", "results" ]
Returns a Python object decoded from the bytes of this encoding. Raises ------ ~ipfsapi.exceptions.DecodingError Parameters ---------- raw : bytes Data to be parsed Returns ------- object
[ "Returns", "a", "Python", "object", "decoded", "from", "the", "bytes", "of", "this", "encoding", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/encoding.py#L60-L78
234,654
ipfs/py-ipfs-api
ipfsapi/encoding.py
Json.parse_partial
def parse_partial(self, data): """Incrementally decodes JSON data sets into Python objects. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ try: # Python 3 requires all JSON data to be a text string lines = self._decoder1.decode(data, False).split("\n") # Add first input line to last buffer line, if applicable, to # handle cases where the JSON string has been chopped in half # at the network level due to streaming if len(self._buffer) > 0 and self._buffer[-1] is not None: self._buffer[-1] += lines[0] self._buffer.extend(lines[1:]) else: self._buffer.extend(lines) except UnicodeDecodeError as error: raise exceptions.DecodingError('json', error) # Process data buffer index = 0 try: # Process each line as separate buffer #PERF: This way the `.lstrip()` call becomes almost always a NOP # even if it does return a different string it will only # have to allocate a new buffer for the currently processed # line. while index < len(self._buffer): while self._buffer[index]: # Make sure buffer does not start with whitespace #PERF: `.lstrip()` does not reallocate if the string does # not actually start with whitespace. self._buffer[index] = self._buffer[index].lstrip() # Handle case where the remainder of the line contained # only whitespace if not self._buffer[index]: self._buffer[index] = None continue # Try decoding the partial data buffer and return results # from this data = self._buffer[index] for index2 in range(index, len(self._buffer)): # If decoding doesn't succeed with the currently # selected buffer (very unlikely with our current # class of input data) then retry with appending # any other pending pieces of input data # This will happen with JSON data that contains # arbitrary new-lines: "{1:\n2,\n3:4}" if index2 > index: data += "\n" + self._buffer[index2] try: (obj, offset) = self._decoder2.raw_decode(data) except ValueError: # Treat error as fatal if we have already added # the final buffer to the input if (index2 + 1) == len(self._buffer): raise else: index = index2 break # Decoding succeeded – yield result and shorten buffer yield obj if offset < len(self._buffer[index]): self._buffer[index] = self._buffer[index][offset:] else: self._buffer[index] = None index += 1 except ValueError as error: # It is unfortunately not possible to reliably detect whether # parsing ended because of an error *within* the JSON string, or # an unexpected *end* of the JSON string. # We therefor have to assume that any error that occurs here # *might* be related to the JSON parser hitting EOF and therefor # have to postpone error reporting until `parse_finalize` is # called. self._lasterror = error finally: # Remove all processed buffers del self._buffer[0:index]
python
def parse_partial(self, data): """Incrementally decodes JSON data sets into Python objects. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ try: # Python 3 requires all JSON data to be a text string lines = self._decoder1.decode(data, False).split("\n") # Add first input line to last buffer line, if applicable, to # handle cases where the JSON string has been chopped in half # at the network level due to streaming if len(self._buffer) > 0 and self._buffer[-1] is not None: self._buffer[-1] += lines[0] self._buffer.extend(lines[1:]) else: self._buffer.extend(lines) except UnicodeDecodeError as error: raise exceptions.DecodingError('json', error) # Process data buffer index = 0 try: # Process each line as separate buffer #PERF: This way the `.lstrip()` call becomes almost always a NOP # even if it does return a different string it will only # have to allocate a new buffer for the currently processed # line. while index < len(self._buffer): while self._buffer[index]: # Make sure buffer does not start with whitespace #PERF: `.lstrip()` does not reallocate if the string does # not actually start with whitespace. self._buffer[index] = self._buffer[index].lstrip() # Handle case where the remainder of the line contained # only whitespace if not self._buffer[index]: self._buffer[index] = None continue # Try decoding the partial data buffer and return results # from this data = self._buffer[index] for index2 in range(index, len(self._buffer)): # If decoding doesn't succeed with the currently # selected buffer (very unlikely with our current # class of input data) then retry with appending # any other pending pieces of input data # This will happen with JSON data that contains # arbitrary new-lines: "{1:\n2,\n3:4}" if index2 > index: data += "\n" + self._buffer[index2] try: (obj, offset) = self._decoder2.raw_decode(data) except ValueError: # Treat error as fatal if we have already added # the final buffer to the input if (index2 + 1) == len(self._buffer): raise else: index = index2 break # Decoding succeeded – yield result and shorten buffer yield obj if offset < len(self._buffer[index]): self._buffer[index] = self._buffer[index][offset:] else: self._buffer[index] = None index += 1 except ValueError as error: # It is unfortunately not possible to reliably detect whether # parsing ended because of an error *within* the JSON string, or # an unexpected *end* of the JSON string. # We therefor have to assume that any error that occurs here # *might* be related to the JSON parser hitting EOF and therefor # have to postpone error reporting until `parse_finalize` is # called. self._lasterror = error finally: # Remove all processed buffers del self._buffer[0:index]
[ "def", "parse_partial", "(", "self", ",", "data", ")", ":", "try", ":", "# Python 3 requires all JSON data to be a text string", "lines", "=", "self", ".", "_decoder1", ".", "decode", "(", "data", ",", "False", ")", ".", "split", "(", "\"\\n\"", ")", "# Add first input line to last buffer line, if applicable, to", "# handle cases where the JSON string has been chopped in half", "# at the network level due to streaming", "if", "len", "(", "self", ".", "_buffer", ")", ">", "0", "and", "self", ".", "_buffer", "[", "-", "1", "]", "is", "not", "None", ":", "self", ".", "_buffer", "[", "-", "1", "]", "+=", "lines", "[", "0", "]", "self", ".", "_buffer", ".", "extend", "(", "lines", "[", "1", ":", "]", ")", "else", ":", "self", ".", "_buffer", ".", "extend", "(", "lines", ")", "except", "UnicodeDecodeError", "as", "error", ":", "raise", "exceptions", ".", "DecodingError", "(", "'json'", ",", "error", ")", "# Process data buffer", "index", "=", "0", "try", ":", "# Process each line as separate buffer", "#PERF: This way the `.lstrip()` call becomes almost always a NOP", "# even if it does return a different string it will only", "# have to allocate a new buffer for the currently processed", "# line.", "while", "index", "<", "len", "(", "self", ".", "_buffer", ")", ":", "while", "self", ".", "_buffer", "[", "index", "]", ":", "# Make sure buffer does not start with whitespace", "#PERF: `.lstrip()` does not reallocate if the string does", "# not actually start with whitespace.", "self", ".", "_buffer", "[", "index", "]", "=", "self", ".", "_buffer", "[", "index", "]", ".", "lstrip", "(", ")", "# Handle case where the remainder of the line contained", "# only whitespace", "if", "not", "self", ".", "_buffer", "[", "index", "]", ":", "self", ".", "_buffer", "[", "index", "]", "=", "None", "continue", "# Try decoding the partial data buffer and return results", "# from this", "data", "=", "self", ".", "_buffer", "[", "index", "]", "for", "index2", "in", "range", "(", "index", ",", "len", "(", "self", ".", "_buffer", ")", ")", ":", "# If decoding doesn't succeed with the currently", "# selected buffer (very unlikely with our current", "# class of input data) then retry with appending", "# any other pending pieces of input data", "# This will happen with JSON data that contains", "# arbitrary new-lines: \"{1:\\n2,\\n3:4}\"", "if", "index2", ">", "index", ":", "data", "+=", "\"\\n\"", "+", "self", ".", "_buffer", "[", "index2", "]", "try", ":", "(", "obj", ",", "offset", ")", "=", "self", ".", "_decoder2", ".", "raw_decode", "(", "data", ")", "except", "ValueError", ":", "# Treat error as fatal if we have already added", "# the final buffer to the input", "if", "(", "index2", "+", "1", ")", "==", "len", "(", "self", ".", "_buffer", ")", ":", "raise", "else", ":", "index", "=", "index2", "break", "# Decoding succeeded – yield result and shorten buffer", "yield", "obj", "if", "offset", "<", "len", "(", "self", ".", "_buffer", "[", "index", "]", ")", ":", "self", ".", "_buffer", "[", "index", "]", "=", "self", ".", "_buffer", "[", "index", "]", "[", "offset", ":", "]", "else", ":", "self", ".", "_buffer", "[", "index", "]", "=", "None", "index", "+=", "1", "except", "ValueError", "as", "error", ":", "# It is unfortunately not possible to reliably detect whether", "# parsing ended because of an error *within* the JSON string, or", "# an unexpected *end* of the JSON string.", "# We therefor have to assume that any error that occurs here", "# *might* be related to the JSON parser hitting EOF and therefor", "# 
have to postpone error reporting until `parse_finalize` is", "# called.", "self", ".", "_lasterror", "=", "error", "finally", ":", "# Remove all processed buffers", "del", "self", ".", "_buffer", "[", "0", ":", "index", "]" ]
Incrementally decodes JSON data sets into Python objects. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator
[ "Incrementally", "decodes", "JSON", "data", "sets", "into", "Python", "objects", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/encoding.py#L141-L230
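Stripped of the buffering and error bookkeeping, the core trick above is json.JSONDecoder.raw_decode, which returns both the decoded object and the offset where it ended, so several concatenated JSON documents can be peeled off one at a time. A minimal standalone sketch:

import json

decoder = json.JSONDecoder()

def iter_json(text):
    # Yield every JSON document found in `text`, one after another.
    pos = 0
    while pos < len(text):
        while pos < len(text) and text[pos].isspace():
            pos += 1  # skip whitespace between documents, like .lstrip() above
        if pos >= len(text):
            break
        obj, pos = decoder.raw_decode(text, pos)
        yield obj

print(list(iter_json('{"a": 1}\n{"b": 2}')))  # -> [{'a': 1}, {'b': 2}]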
234,655
ipfs/py-ipfs-api
ipfsapi/encoding.py
Json.parse_finalize
def parse_finalize(self): """Raises errors for incomplete buffered data that could not be parsed because the end of the input data has been reached. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- tuple : Always empty """ try: try: # Raise exception for remaining bytes in bytes decoder self._decoder1.decode(b'', True) except UnicodeDecodeError as error: raise exceptions.DecodingError('json', error) # Late raise errors that looked like they could have been fixed if # the caller had provided more data if self._buffer: raise exceptions.DecodingError('json', self._lasterror) finally: # Reset state self._buffer = [] self._lasterror = None self._decoder1.reset() return ()
python
def parse_finalize(self): """Raises errors for incomplete buffered data that could not be parsed because the end of the input data has been reached. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- tuple : Always empty """ try: try: # Raise exception for remaining bytes in bytes decoder self._decoder1.decode(b'', True) except UnicodeDecodeError as error: raise exceptions.DecodingError('json', error) # Late raise errors that looked like they could have been fixed if # the caller had provided more data if self._buffer: raise exceptions.DecodingError('json', self._lasterror) finally: # Reset state self._buffer = [] self._lasterror = None self._decoder1.reset() return ()
[ "def", "parse_finalize", "(", "self", ")", ":", "try", ":", "try", ":", "# Raise exception for remaining bytes in bytes decoder", "self", ".", "_decoder1", ".", "decode", "(", "b''", ",", "True", ")", "except", "UnicodeDecodeError", "as", "error", ":", "raise", "exceptions", ".", "DecodingError", "(", "'json'", ",", "error", ")", "# Late raise errors that looked like they could have been fixed if", "# the caller had provided more data", "if", "self", ".", "_buffer", ":", "raise", "exceptions", ".", "DecodingError", "(", "'json'", ",", "self", ".", "_lasterror", ")", "finally", ":", "# Reset state", "self", ".", "_buffer", "=", "[", "]", "self", ".", "_lasterror", "=", "None", "self", ".", "_decoder1", ".", "reset", "(", ")", "return", "(", ")" ]
Raises errors for incomplete buffered data that could not be parsed because the end of the input data has been reached. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- tuple : Always empty
[ "Raises", "errors", "for", "incomplete", "buffered", "data", "that", "could", "not", "be", "parsed", "because", "the", "end", "of", "the", "input", "data", "has", "been", "reached", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/encoding.py#L232-L261
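How a caller is expected to drive the two-phase API: parse_partial once per network chunk, then parse_finalize once at end of stream. A hedged sketch, where codec is any parser instance from this module:

def decode_stream(codec, chunks):
    for chunk in chunks:  # chunk: bytes as they arrive off the wire
        for obj in codec.parse_partial(chunk):
            yield obj
    # Raises DecodingError if an incomplete document was left buffered.
    for obj in codec.parse_finalize():
        yield obj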
234,656
ipfs/py-ipfs-api
ipfsapi/encoding.py
Json.encode
def encode(self, obj): """Returns ``obj`` serialized as JSON formatted bytes. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : str | list | dict | int JSON serializable Python object Returns ------- bytes """ try: result = json.dumps(obj, sort_keys=True, indent=None, separators=(',', ':'), ensure_ascii=False) if isinstance(result, six.text_type): return result.encode("utf-8") else: return result except (UnicodeEncodeError, TypeError) as error: raise exceptions.EncodingError('json', error)
python
def encode(self, obj): """Returns ``obj`` serialized as JSON formatted bytes. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : str | list | dict | int JSON serializable Python object Returns ------- bytes """ try: result = json.dumps(obj, sort_keys=True, indent=None, separators=(',', ':'), ensure_ascii=False) if isinstance(result, six.text_type): return result.encode("utf-8") else: return result except (UnicodeEncodeError, TypeError) as error: raise exceptions.EncodingError('json', error)
[ "def", "encode", "(", "self", ",", "obj", ")", ":", "try", ":", "result", "=", "json", ".", "dumps", "(", "obj", ",", "sort_keys", "=", "True", ",", "indent", "=", "None", ",", "separators", "=", "(", "','", ",", "':'", ")", ",", "ensure_ascii", "=", "False", ")", "if", "isinstance", "(", "result", ",", "six", ".", "text_type", ")", ":", "return", "result", ".", "encode", "(", "\"utf-8\"", ")", "else", ":", "return", "result", "except", "(", "UnicodeEncodeError", ",", "TypeError", ")", "as", "error", ":", "raise", "exceptions", ".", "EncodingError", "(", "'json'", ",", "error", ")" ]
Returns ``obj`` serialized as JSON formatted bytes. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : str | list | dict | int JSON serializable Python object Returns ------- bytes
[ "Returns", "obj", "serialized", "as", "JSON", "formatted", "bytes", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/encoding.py#L263-L287
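The chosen json.dumps options give compact, key-sorted output, encoded to UTF-8 bytes for the wire; a quick demonstration:

import json

wire = json.dumps({'b': 2, 'a': 1}, sort_keys=True, indent=None,
                  separators=(',', ':'), ensure_ascii=False).encode('utf-8')
print(wire)  # -> b'{"a":1,"b":2}'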
234,657
ipfs/py-ipfs-api
ipfsapi/encoding.py
Pickle.parse_finalize
def parse_finalize(self): """Parses the buffered data and yields the result. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ try: self._buffer.seek(0, 0) yield pickle.load(self._buffer) except pickle.UnpicklingError as error: raise exceptions.DecodingError('pickle', error)
python
def parse_finalize(self): """Parses the buffered data and yields the result. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator """ try: self._buffer.seek(0, 0) yield pickle.load(self._buffer) except pickle.UnpicklingError as error: raise exceptions.DecodingError('pickle', error)
[ "def", "parse_finalize", "(", "self", ")", ":", "try", ":", "self", ".", "_buffer", ".", "seek", "(", "0", ",", "0", ")", "yield", "pickle", ".", "load", "(", "self", ".", "_buffer", ")", "except", "pickle", ".", "UnpicklingError", "as", "error", ":", "raise", "exceptions", ".", "DecodingError", "(", "'pickle'", ",", "error", ")" ]
Parses the buffered data and yields the result. Raises ------ ~ipfsapi.exceptions.DecodingError Returns ------- generator
[ "Parses", "the", "buffered", "data", "and", "yields", "the", "result", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/encoding.py#L319-L334
234,658
ipfs/py-ipfs-api
ipfsapi/encoding.py
Pickle.encode
def encode(self, obj): """Returns ``obj`` serialized as a pickle binary string. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : object Serializable Python object Returns ------- bytes """ try: return pickle.dumps(obj) except pickle.PicklingError as error: raise exceptions.EncodingError('pickle', error)
python
def encode(self, obj): """Returns ``obj`` serialized as a pickle binary string. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : object Serializable Python object Returns ------- bytes """ try: return pickle.dumps(obj) except pickle.PicklingError as error: raise exceptions.EncodingError('pickle', error)
[ "def", "encode", "(", "self", ",", "obj", ")", ":", "try", ":", "return", "pickle", ".", "dumps", "(", "obj", ")", "except", "pickle", ".", "PicklingError", "as", "error", ":", "raise", "exceptions", ".", "EncodingError", "(", "'pickle'", ",", "error", ")" ]
Returns ``obj`` serialized as a pickle binary string. Raises ------ ~ipfsapi.exceptions.EncodingError Parameters ---------- obj : object Serializable Python object Returns ------- bytes
[ "Returns", "obj", "serialized", "as", "a", "pickle", "binary", "string", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/encoding.py#L360-L379
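The two Pickle halves above amount to an in-memory round trip: encode is pickle.dumps, and parse_finalize rewinds the accumulated buffer and calls pickle.load. A compact demonstration of the same sequence:

import io
import pickle

buffer = io.BytesIO()
buffer.write(pickle.dumps({'answer': 42}))  # the encode side
buffer.seek(0, 0)                           # rewind, as parse_finalize does
print(pickle.load(buffer))                  # -> {'answer': 42}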
234,659
ipfs/py-ipfs-api
ipfsapi/multipart.py
glob_compile
def glob_compile(pat): """Translate a shell glob PATTERN to a regular expression. This is almost entirely based on `fnmatch.translate` source-code from the python 3.5 standard-library. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i + 1 if c == '/' and len(pat) > (i + 2) and pat[i:(i + 3)] == '**/': # Special-case for "any number of sub-directories" operator since # may also expand to no entries: # Otherwise `a/**/b` would expand to `a[/].*[/]b` which wouldn't # match the immediate sub-directories of `a`, like `a/b`. i = i + 3 res = res + '[/]([^/]*[/])*' elif c == '*': if len(pat) > i and pat[i] == '*': i = i + 1 res = res + '.*' else: res = res + '[^/]*' elif c == '?': res = res + '[^/]' elif c == '[': j = i if j < n and pat[j] == '!': j = j + 1 if j < n and pat[j] == ']': j = j + 1 while j < n and pat[j] != ']': j = j + 1 if j >= n: res = res + '\\[' else: stuff = pat[i:j].replace('\\', '\\\\') i = j + 1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) return re.compile('^' + res + '\Z(?ms)' + '$')
python
def glob_compile(pat): """Translate a shell glob PATTERN to a regular expression. This is almost entirely based on `fnmatch.translate` source-code from the python 3.5 standard-library. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i + 1 if c == '/' and len(pat) > (i + 2) and pat[i:(i + 3)] == '**/': # Special-case for "any number of sub-directories" operator since # may also expand to no entries: # Otherwise `a/**/b` would expand to `a[/].*[/]b` which wouldn't # match the immediate sub-directories of `a`, like `a/b`. i = i + 3 res = res + '[/]([^/]*[/])*' elif c == '*': if len(pat) > i and pat[i] == '*': i = i + 1 res = res + '.*' else: res = res + '[^/]*' elif c == '?': res = res + '[^/]' elif c == '[': j = i if j < n and pat[j] == '!': j = j + 1 if j < n and pat[j] == ']': j = j + 1 while j < n and pat[j] != ']': j = j + 1 if j >= n: res = res + '\\[' else: stuff = pat[i:j].replace('\\', '\\\\') i = j + 1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) else: res = res + re.escape(c) return re.compile('^' + res + '\Z(?ms)' + '$')
[ "def", "glob_compile", "(", "pat", ")", ":", "i", ",", "n", "=", "0", ",", "len", "(", "pat", ")", "res", "=", "''", "while", "i", "<", "n", ":", "c", "=", "pat", "[", "i", "]", "i", "=", "i", "+", "1", "if", "c", "==", "'/'", "and", "len", "(", "pat", ")", ">", "(", "i", "+", "2", ")", "and", "pat", "[", "i", ":", "(", "i", "+", "3", ")", "]", "==", "'**/'", ":", "# Special-case for \"any number of sub-directories\" operator since", "# may also expand to no entries:", "# Otherwise `a/**/b` would expand to `a[/].*[/]b` which wouldn't", "# match the immediate sub-directories of `a`, like `a/b`.", "i", "=", "i", "+", "3", "res", "=", "res", "+", "'[/]([^/]*[/])*'", "elif", "c", "==", "'*'", ":", "if", "len", "(", "pat", ")", ">", "i", "and", "pat", "[", "i", "]", "==", "'*'", ":", "i", "=", "i", "+", "1", "res", "=", "res", "+", "'.*'", "else", ":", "res", "=", "res", "+", "'[^/]*'", "elif", "c", "==", "'?'", ":", "res", "=", "res", "+", "'[^/]'", "elif", "c", "==", "'['", ":", "j", "=", "i", "if", "j", "<", "n", "and", "pat", "[", "j", "]", "==", "'!'", ":", "j", "=", "j", "+", "1", "if", "j", "<", "n", "and", "pat", "[", "j", "]", "==", "']'", ":", "j", "=", "j", "+", "1", "while", "j", "<", "n", "and", "pat", "[", "j", "]", "!=", "']'", ":", "j", "=", "j", "+", "1", "if", "j", ">=", "n", ":", "res", "=", "res", "+", "'\\\\['", "else", ":", "stuff", "=", "pat", "[", "i", ":", "j", "]", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", "i", "=", "j", "+", "1", "if", "stuff", "[", "0", "]", "==", "'!'", ":", "stuff", "=", "'^'", "+", "stuff", "[", "1", ":", "]", "elif", "stuff", "[", "0", "]", "==", "'^'", ":", "stuff", "=", "'\\\\'", "+", "stuff", "res", "=", "'%s[%s]'", "%", "(", "res", ",", "stuff", ")", "else", ":", "res", "=", "res", "+", "re", ".", "escape", "(", "c", ")", "return", "re", ".", "compile", "(", "'^'", "+", "res", "+", "'\\Z(?ms)'", "+", "'$'", ")" ]
Translate a shell glob PATTERN to a regular expression. This is almost entirely based on `fnmatch.translate` source-code from the python 3.5 standard-library.
[ "Translate", "a", "shell", "glob", "PATTERN", "to", "a", "regular", "expression", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L319-L366
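For a sense of what the translation produces: glob_compile('foo/**/*.py') builds ^foo[/]([^/]*[/])*[^/]*\.py\Z(?ms)$. Note that Python 3.11+ rejects global inline flags that are not at the start of a pattern, so the sketch below passes the equivalent flags explicitly; the behaviour is otherwise the same:

import re

pattern = re.compile(r'^foo[/]([^/]*[/])*[^/]*\.py\Z$', re.M | re.S)
print(bool(pattern.match('foo/bar/baz.py')))  # -> True  ('**/' spans subdirs)
print(bool(pattern.match('foo/baz.py')))      # -> True  ('**/' may match nothing)
print(bool(pattern.match('other/baz.py')))    # -> False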
234,660
ipfs/py-ipfs-api
ipfsapi/multipart.py
stream_files
def stream_files(files, chunk_size=default_chunk_size): """Gets a buffered generator for streaming files. Returns a buffered generator which encodes a file or list of files as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- files : str The file(s) to stream chunk_size : int Maximum size of each stream chunk """ stream = FileStream(files, chunk_size=chunk_size) return stream.body(), stream.headers
python
def stream_files(files, chunk_size=default_chunk_size): """Gets a buffered generator for streaming files. Returns a buffered generator which encodes a file or list of files as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- files : str The file(s) to stream chunk_size : int Maximum size of each stream chunk """ stream = FileStream(files, chunk_size=chunk_size) return stream.body(), stream.headers
[ "def", "stream_files", "(", "files", ",", "chunk_size", "=", "default_chunk_size", ")", ":", "stream", "=", "FileStream", "(", "files", ",", "chunk_size", "=", "chunk_size", ")", "return", "stream", ".", "body", "(", ")", ",", "stream", ".", "headers" ]
Gets a buffered generator for streaming files. Returns a buffered generator which encodes a file or list of files as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- files : str The file(s) to stream chunk_size : int Maximum size of each stream chunk
[ "Gets", "a", "buffered", "generator", "for", "streaming", "files", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L560-L575
234,661
ipfs/py-ipfs-api
ipfsapi/multipart.py
stream_directory
def stream_directory(directory, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming directories. Returns a buffered generator which encodes a directory as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- directory : str The filepath of the directory to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ stream = DirectoryStream(directory, recursive=recursive, patterns=patterns, chunk_size=chunk_size) return stream.body(), stream.headers
python
def stream_directory(directory, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming directories. Returns a buffered generator which encodes a directory as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- directory : str The filepath of the directory to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ stream = DirectoryStream(directory, recursive=recursive, patterns=patterns, chunk_size=chunk_size) return stream.body(), stream.headers
[ "def", "stream_directory", "(", "directory", ",", "recursive", "=", "False", ",", "patterns", "=", "'**'", ",", "chunk_size", "=", "default_chunk_size", ")", ":", "stream", "=", "DirectoryStream", "(", "directory", ",", "recursive", "=", "recursive", ",", "patterns", "=", "patterns", ",", "chunk_size", "=", "chunk_size", ")", "return", "stream", ".", "body", "(", ")", ",", "stream", ".", "headers" ]
Gets a buffered generator for streaming directories. Returns a buffered generator which encodes a directory as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- directory : str The filepath of the directory to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk
[ "Gets", "a", "buffered", "generator", "for", "streaming", "directories", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L578-L604
234,662
ipfs/py-ipfs-api
ipfsapi/multipart.py
stream_filesystem_node
def stream_filesystem_node(path, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming either files or directories. Returns a buffered generator which encodes the file or directory at the given path as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- path : str The filepath of the directory or file to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ is_dir = isinstance(path, six.string_types) and os.path.isdir(path) if recursive or is_dir: return stream_directory(path, recursive, patterns, chunk_size) else: return stream_files(path, chunk_size)
python
def stream_filesystem_node(path, recursive=False, patterns='**', chunk_size=default_chunk_size): """Gets a buffered generator for streaming either files or directories. Returns a buffered generator which encodes the file or directory at the given path as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- path : str The filepath of the directory or file to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk """ is_dir = isinstance(path, six.string_types) and os.path.isdir(path) if recursive or is_dir: return stream_directory(path, recursive, patterns, chunk_size) else: return stream_files(path, chunk_size)
[ "def", "stream_filesystem_node", "(", "path", ",", "recursive", "=", "False", ",", "patterns", "=", "'**'", ",", "chunk_size", "=", "default_chunk_size", ")", ":", "is_dir", "=", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "and", "os", ".", "path", ".", "isdir", "(", "path", ")", "if", "recursive", "or", "is_dir", ":", "return", "stream_directory", "(", "path", ",", "recursive", ",", "patterns", ",", "chunk_size", ")", "else", ":", "return", "stream_files", "(", "path", ",", "chunk_size", ")" ]
Gets a buffered generator for streaming either files or directories. Returns a buffered generator which encodes the file or directory at the given path as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- path : str The filepath of the directory or file to stream recursive : bool Stream all content within the directory recursively? patterns : str | list Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the names of the filepaths to keep chunk_size : int Maximum size of each stream chunk
[ "Gets", "a", "buffered", "generator", "for", "streaming", "either", "files", "or", "directories", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L607-L633
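These front-ends pair a body generator with matching multipart headers, ready to hand to an HTTP client. A hedged usage sketch, assuming the requests library and an IPFS daemon on the default local API port; the file path is illustrative:

import requests

body, headers = stream_filesystem_node('example.txt')
response = requests.post('http://127.0.0.1:5001/api/v0/add',
                         data=body, headers=headers)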
234,663
ipfs/py-ipfs-api
ipfsapi/multipart.py
stream_bytes
def stream_bytes(data, chunk_size=default_chunk_size): """Gets a buffered generator for streaming binary data. Returns a buffered generator which encodes binary data as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- data : bytes The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ stream = BytesStream(data, chunk_size=chunk_size) return stream.body(), stream.headers
python
def stream_bytes(data, chunk_size=default_chunk_size): """Gets a buffered generator for streaming binary data. Returns a buffered generator which encodes binary data as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- data : bytes The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ stream = BytesStream(data, chunk_size=chunk_size) return stream.body(), stream.headers
[ "def", "stream_bytes", "(", "data", ",", "chunk_size", "=", "default_chunk_size", ")", ":", "stream", "=", "BytesStream", "(", "data", ",", "chunk_size", "=", "chunk_size", ")", "return", "stream", ".", "body", "(", ")", ",", "stream", ".", "headers" ]
Gets a buffered generator for streaming binary data. Returns a buffered generator which encodes binary data as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- data : bytes The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict)
[ "Gets", "a", "buffered", "generator", "for", "streaming", "binary", "data", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L636-L655
234,664
ipfs/py-ipfs-api
ipfsapi/multipart.py
stream_text
def stream_text(text, chunk_size=default_chunk_size): """Gets a buffered generator for streaming text. Returns a buffered generator which encodes a string as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- text : str The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ if isgenerator(text): def binary_stream(): for item in text: if six.PY2 and isinstance(text, six.binary_type): #PY2: Allow binary strings under Python 2 since # Python 2 code is not expected to always get the # distinction between text and binary strings right. yield text else: yield text.encode("utf-8") data = binary_stream() elif six.PY2 and isinstance(text, six.binary_type): #PY2: See above. data = text else: data = text.encode("utf-8") return stream_bytes(data, chunk_size)
python
def stream_text(text, chunk_size=default_chunk_size): """Gets a buffered generator for streaming text. Returns a buffered generator which encodes a string as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- text : str The data bytes to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict) """ if isgenerator(text): def binary_stream(): for item in text: if six.PY2 and isinstance(text, six.binary_type): #PY2: Allow binary strings under Python 2 since # Python 2 code is not expected to always get the # distinction between text and binary strings right. yield text else: yield text.encode("utf-8") data = binary_stream() elif six.PY2 and isinstance(text, six.binary_type): #PY2: See above. data = text else: data = text.encode("utf-8") return stream_bytes(data, chunk_size)
[ "def", "stream_text", "(", "text", ",", "chunk_size", "=", "default_chunk_size", ")", ":", "if", "isgenerator", "(", "text", ")", ":", "def", "binary_stream", "(", ")", ":", "for", "item", "in", "text", ":", "if", "six", ".", "PY2", "and", "isinstance", "(", "text", ",", "six", ".", "binary_type", ")", ":", "#PY2: Allow binary strings under Python 2 since", "# Python 2 code is not expected to always get the", "# distinction between text and binary strings right.", "yield", "text", "else", ":", "yield", "text", ".", "encode", "(", "\"utf-8\"", ")", "data", "=", "binary_stream", "(", ")", "elif", "six", ".", "PY2", "and", "isinstance", "(", "text", ",", "six", ".", "binary_type", ")", ":", "#PY2: See above.", "data", "=", "text", "else", ":", "data", "=", "text", ".", "encode", "(", "\"utf-8\"", ")", "return", "stream_bytes", "(", "data", ",", "chunk_size", ")" ]
Gets a buffered generator for streaming text. Returns a buffered generator which encodes a string as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- text : str The text to stream chunk_size : int The maximum size of each stream chunk Returns ------- (generator, dict)
[ "Gets", "a", "buffered", "generator", "for", "streaming", "text", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L658-L692
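Note a bug in the generator branch above: the loop binds item but then tests and yields text (the generator itself), so streaming a generator of text chunks would fail. A corrected sketch of the intended behaviour:

def binary_stream(chunks):
    # Encode each text chunk to UTF-8 bytes; pass binary chunks through.
    for item in chunks:
        if isinstance(item, bytes):
            yield item
        else:
            yield item.encode('utf-8')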
234,665
ipfs/py-ipfs-api
ipfsapi/multipart.py
BodyGenerator._write_headers
def _write_headers(self, headers): """Yields the HTTP header text for some content. Parameters ---------- headers : dict The headers to yield """ if headers: for name in sorted(headers.keys()): yield name.encode("ascii") yield b': ' yield headers[name].encode("ascii") yield CRLF yield CRLF
python
def _write_headers(self, headers): """Yields the HTTP header text for some content. Parameters ---------- headers : dict The headers to yield """ if headers: for name in sorted(headers.keys()): yield name.encode("ascii") yield b': ' yield headers[name].encode("ascii") yield CRLF yield CRLF
[ "def", "_write_headers", "(", "self", ",", "headers", ")", ":", "if", "headers", ":", "for", "name", "in", "sorted", "(", "headers", ".", "keys", "(", ")", ")", ":", "yield", "name", ".", "encode", "(", "\"ascii\"", ")", "yield", "b': '", "yield", "headers", "[", "name", "]", ".", "encode", "(", "\"ascii\"", ")", "yield", "CRLF", "yield", "CRLF" ]
Yields the HTTP header text for some content. Parameters ---------- headers : dict The headers to yield
[ "Yields", "the", "HTTP", "header", "text", "for", "some", "content", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L141-L155
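For illustration, the serialization above can be restated as a standalone generator; this sketch (not part of the library) shows the exact byte sequence yielded: header names in sorted order, each line terminated by CRLF, followed by one blank line:

CRLF = b'\r\n'  # same constant the module defines

def write_headers(headers):
    # Standalone restatement of BodyGenerator._write_headers
    if headers:
        for name in sorted(headers.keys()):
            yield name.encode("ascii") + b': ' + headers[name].encode("ascii") + CRLF
    yield CRLF

assert (b"".join(write_headers({"Content-Type": "text/plain"}))
        == b'Content-Type: text/plain\r\n\r\n')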
234,666
ipfs/py-ipfs-api
ipfsapi/multipart.py
BodyGenerator.file_open
def file_open(self, fn): """Yields the opening text of a file section in multipart HTTP. Parameters ---------- fn : str Filename for the file being opened and added to the HTTP body """ yield b'--' yield self.boundary.encode() yield CRLF headers = content_disposition(fn) headers.update(content_type(fn)) for c in self._write_headers(headers): yield c
python
def file_open(self, fn): """Yields the opening text of a file section in multipart HTTP. Parameters ---------- fn : str Filename for the file being opened and added to the HTTP body """ yield b'--' yield self.boundary.encode() yield CRLF headers = content_disposition(fn) headers.update(content_type(fn)) for c in self._write_headers(headers): yield c
[ "def", "file_open", "(", "self", ",", "fn", ")", ":", "yield", "b'--'", "yield", "self", ".", "boundary", ".", "encode", "(", ")", "yield", "CRLF", "headers", "=", "content_disposition", "(", "fn", ")", "headers", ".", "update", "(", "content_type", "(", "fn", ")", ")", "for", "c", "in", "self", ".", "_write_headers", "(", "headers", ")", ":", "yield", "c" ]
Yields the opening text of a file section in multipart HTTP. Parameters ---------- fn : str Filename for the file being opened and added to the HTTP body
[ "Yields", "the", "opening", "text", "of", "a", "file", "section", "in", "multipart", "HTTP", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L169-L183
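Joined together, the bytes from ``file_open`` form a standard multipart part header. A self-contained rendering with an assumed boundary and assumed header values (the real ``content_disposition()`` and ``content_type()`` results may differ):

boundary = "XYZ"                                                     # assumption
headers = {"Content-Disposition": 'file; filename="example.txt"',   # assumption
           "Content-Type": "text/plain"}                            # assumption
part = b"--" + boundary.encode() + b"\r\n"
for name in sorted(headers):
    part += name.encode("ascii") + b": " + headers[name].encode("ascii") + b"\r\n"
part += b"\r\n"
print(part.decode("ascii"))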
234,667
ipfs/py-ipfs-api
ipfsapi/multipart.py
BufferedGenerator.file_chunks
def file_chunks(self, fp): """Yields chunks of a file. Parameters ---------- fp : io.RawIOBase The file to break into chunks (must be an open file or have the ``readinto`` method) """ fsize = utils.file_size(fp) offset = 0 if hasattr(fp, 'readinto'): while offset < fsize: nb = fp.readinto(self._internal) yield self.buf[:nb] offset += nb else: while offset < fsize: nb = min(self.chunk_size, fsize - offset) yield fp.read(nb) offset += nb
python
def file_chunks(self, fp): """Yields chunks of a file. Parameters ---------- fp : io.RawIOBase The file to break into chunks (must be an open file or have the ``readinto`` method) """ fsize = utils.file_size(fp) offset = 0 if hasattr(fp, 'readinto'): while offset < fsize: nb = fp.readinto(self._internal) yield self.buf[:nb] offset += nb else: while offset < fsize: nb = min(self.chunk_size, fsize - offset) yield fp.read(nb) offset += nb
[ "def", "file_chunks", "(", "self", ",", "fp", ")", ":", "fsize", "=", "utils", ".", "file_size", "(", "fp", ")", "offset", "=", "0", "if", "hasattr", "(", "fp", ",", "'readinto'", ")", ":", "while", "offset", "<", "fsize", ":", "nb", "=", "fp", ".", "readinto", "(", "self", ".", "_internal", ")", "yield", "self", ".", "buf", "[", ":", "nb", "]", "offset", "+=", "nb", "else", ":", "while", "offset", "<", "fsize", ":", "nb", "=", "min", "(", "self", ".", "chunk_size", ",", "fsize", "-", "offset", ")", "yield", "fp", ".", "read", "(", "nb", ")", "offset", "+=", "nb" ]
Yields chunks of a file. Parameters ---------- fp : io.RawIOBase The file to break into chunks (must be an open file or have the ``readinto`` method)
[ "Yields", "chunks", "of", "a", "file", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L222-L242
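The ``readinto`` branch above amounts to the following loop; here is a simplified, self-contained variant that stops on end-of-file instead of tracking ``utils.file_size``:

import io

buf = bytearray(4)                    # plays the role of self._internal
fp = io.BytesIO(b"abcdefghij")
chunks = []
while True:
    nb = fp.readinto(buf)             # fill the reusable buffer in place
    if not nb:
        break
    chunks.append(bytes(buf[:nb]))    # keep only the bytes just read
assert chunks == [b"abcd", b"efgh", b"ij"]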
234,668
ipfs/py-ipfs-api
ipfsapi/multipart.py
BufferedGenerator.gen_chunks
def gen_chunks(self, gen):
    """Generates byte chunks of a given size.

    Takes a bytes generator and yields chunks of a maximum of
    ``chunk_size`` bytes.

    Parameters
    ----------
    gen : generator
        The bytes generator that produces the bytes
    """
    for data in gen:
        size = len(data)
        if size < self.chunk_size:
            yield data
        else:
            # ``buffer`` is the Python 2 zero-copy view builtin; this
            # module is assumed to alias it to ``memoryview`` on Python 3
            mv = buffer(data)
            offset = 0
            while offset < size:
                nb = min(self.chunk_size, size - offset)
                yield mv[offset:offset + nb]
                offset += nb
python
def gen_chunks(self, gen):
    """Generates byte chunks of a given size.

    Takes a bytes generator and yields chunks of a maximum of
    ``chunk_size`` bytes.

    Parameters
    ----------
    gen : generator
        The bytes generator that produces the bytes
    """
    for data in gen:
        size = len(data)
        if size < self.chunk_size:
            yield data
        else:
            # ``buffer`` is the Python 2 zero-copy view builtin; this
            # module is assumed to alias it to ``memoryview`` on Python 3
            mv = buffer(data)
            offset = 0
            while offset < size:
                nb = min(self.chunk_size, size - offset)
                yield mv[offset:offset + nb]
                offset += nb
[ "def", "gen_chunks", "(", "self", ",", "gen", ")", ":", "for", "data", "in", "gen", ":", "size", "=", "len", "(", "data", ")", "if", "size", "<", "self", ".", "chunk_size", ":", "yield", "data", "else", ":", "mv", "=", "buffer", "(", "data", ")", "offset", "=", "0", "while", "offset", "<", "size", ":", "nb", "=", "min", "(", "self", ".", "chunk_size", ",", "size", "-", "offset", ")", "yield", "mv", "[", "offset", ":", "offset", "+", "nb", "]", "offset", "+=", "nb" ]
Generates byte chunks of a given size. Takes a bytes generator and yields chunks of a maximum of ``chunk_size`` bytes. Parameters ---------- gen : generator The bytes generator that produces the bytes
[ "Generates", "byte", "chunks", "of", "a", "given", "size", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L244-L265
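The ``else`` branch is plain zero-copy re-slicing; the same effect on Python 3, as a sketch:

data = b"abcdefghij"
chunk_size = 4
mv = memoryview(data)                 # zero-copy view, like buffer() on Python 2
chunks = [bytes(mv[i:i + chunk_size])
          for i in range(0, len(data), chunk_size)]
assert chunks == [b"abcd", b"efgh", b"ij"]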
234,669
ipfs/py-ipfs-api
ipfsapi/multipart.py
FileStream.body
def body(self): """Yields the body of the buffered file.""" for fp, need_close in self.files: try: name = os.path.basename(fp.name) except AttributeError: name = '' for chunk in self.gen_chunks(self.envelope.file_open(name)): yield chunk for chunk in self.file_chunks(fp): yield chunk for chunk in self.gen_chunks(self.envelope.file_close()): yield chunk if need_close: fp.close() for chunk in self.close(): yield chunk
python
def body(self): """Yields the body of the buffered file.""" for fp, need_close in self.files: try: name = os.path.basename(fp.name) except AttributeError: name = '' for chunk in self.gen_chunks(self.envelope.file_open(name)): yield chunk for chunk in self.file_chunks(fp): yield chunk for chunk in self.gen_chunks(self.envelope.file_close()): yield chunk if need_close: fp.close() for chunk in self.close(): yield chunk
[ "def", "body", "(", "self", ")", ":", "for", "fp", ",", "need_close", "in", "self", ".", "files", ":", "try", ":", "name", "=", "os", ".", "path", ".", "basename", "(", "fp", ".", "name", ")", "except", "AttributeError", ":", "name", "=", "''", "for", "chunk", "in", "self", ".", "gen_chunks", "(", "self", ".", "envelope", ".", "file_open", "(", "name", ")", ")", ":", "yield", "chunk", "for", "chunk", "in", "self", ".", "file_chunks", "(", "fp", ")", ":", "yield", "chunk", "for", "chunk", "in", "self", ".", "gen_chunks", "(", "self", ".", "envelope", ".", "file_close", "(", ")", ")", ":", "yield", "chunk", "if", "need_close", ":", "fp", ".", "close", "(", ")", "for", "chunk", "in", "self", ".", "close", "(", ")", ":", "yield", "chunk" ]
Yields the body of the buffered file.
[ "Yields", "the", "body", "of", "the", "buffered", "file", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L300-L316
234,670
ipfs/py-ipfs-api
ipfsapi/multipart.py
DirectoryStream._prepare
def _prepare(self):
    """Pre-formats the multipart HTTP request to transmit the directory."""
    names = []
    added_directories = set()

    def add_directory(short_path):
        # Do not continue if this directory has already been added
        if short_path in added_directories:
            return

        # Scan for first super-directory that has already been added
        dir_base = short_path
        dir_parts = []
        while dir_base:
            dir_base, dir_name = os.path.split(dir_base)
            dir_parts.append(dir_name)
            if dir_base in added_directories:
                break

        # Add missing intermediate directory nodes in the right order
        while dir_parts:
            dir_base = os.path.join(dir_base, dir_parts.pop())

            # Create an empty, fake file to represent the directory
            mock_file = io.StringIO()
            mock_file.write(u'')
            # Add this directory to those that will be sent
            names.append(('files',
                          (dir_base.replace(os.sep, '/'),
                           mock_file,
                           'application/x-directory')))
            # Remember that this directory has already been sent
            added_directories.add(dir_base)

    def add_file(short_path, full_path):
        try:
            # Open the file and register it under its relative name
            names.append(('files',
                          (short_path.replace(os.sep, '/'),
                           open(full_path, 'rb'),
                           'application/octet-stream')))
        except OSError:
            # File might have disappeared between `os.walk()` and `open()`
            pass

    def match_short_path(short_path):
        # Remove initial path component so that all files are based in
        # the target directory itself (not one level above)
        if os.sep in short_path:
            path = short_path.split(os.sep, 1)[1]
        else:
            return False

        # Convert all path separators to POSIX style
        path = path.replace(os.sep, '/')

        # Do the matching on the simplified path
        for pattern in self.patterns:
            if pattern.match(path):
                return True
        return False

    # Identify the unnecessary portion of the relative path
    truncate = os.path.dirname(self.directory)
    # Traverse the filesystem downward from the target directory's path
    # Errors: `os.walk()` will simply return an empty generator if the
    #         target directory does not exist.
    wildcard_directories = set()
    for curr_dir, _, files in os.walk(self.directory):
        # find the path relative to the directory being added
        if len(truncate) > 0:
            _, _, short_path = curr_dir.partition(truncate)
        else:
            short_path = curr_dir
        # remove leading / or \ if it is present
        if short_path.startswith(os.sep):
            short_path = short_path[1:]

        wildcard_directory = False
        if os.path.split(short_path)[0] in wildcard_directories:
            # Parent directory has matched a pattern, all sub-nodes should
            # be added too
            wildcard_directories.add(short_path)
            wildcard_directory = True
        else:
            # Check if directory path matches one of the patterns
            if match_short_path(short_path):
                # Directory matched pattern and it should therefore
                # be added along with all of its contents
                wildcard_directories.add(short_path)
                wildcard_directory = True

        # Always add directories within wildcard directories - even if they
        # are empty
        if wildcard_directory:
            add_directory(short_path)

        # Iterate across the files in the current directory
        for filename in files:
            # Find the filename relative to the directory being added
            short_name = os.path.join(short_path, filename)
            filepath = os.path.join(curr_dir, filename)

            if wildcard_directory:
                # Always add files in wildcard directories
                add_file(short_name, filepath)
            else:
                # Add file (and all missing intermediary directories)
                # if it matches one of the patterns
                if match_short_path(short_name):
                    add_directory(short_path)
                    add_file(short_name, filepath)

    # Prepare (but do not send) the request so that its encoded body and
    # headers can be reused by the streaming machinery
    req = requests.Request("POST", 'http://localhost', files=names)
    prep = req.prepare()
    return prep
python
def _prepare(self):
    """Pre-formats the multipart HTTP request to transmit the directory."""
    names = []
    added_directories = set()

    def add_directory(short_path):
        # Do not continue if this directory has already been added
        if short_path in added_directories:
            return

        # Scan for first super-directory that has already been added
        dir_base = short_path
        dir_parts = []
        while dir_base:
            dir_base, dir_name = os.path.split(dir_base)
            dir_parts.append(dir_name)
            if dir_base in added_directories:
                break

        # Add missing intermediate directory nodes in the right order
        while dir_parts:
            dir_base = os.path.join(dir_base, dir_parts.pop())

            # Create an empty, fake file to represent the directory
            mock_file = io.StringIO()
            mock_file.write(u'')
            # Add this directory to those that will be sent
            names.append(('files',
                          (dir_base.replace(os.sep, '/'),
                           mock_file,
                           'application/x-directory')))
            # Remember that this directory has already been sent
            added_directories.add(dir_base)

    def add_file(short_path, full_path):
        try:
            # Open the file and register it under its relative name
            names.append(('files',
                          (short_path.replace(os.sep, '/'),
                           open(full_path, 'rb'),
                           'application/octet-stream')))
        except OSError:
            # File might have disappeared between `os.walk()` and `open()`
            pass

    def match_short_path(short_path):
        # Remove initial path component so that all files are based in
        # the target directory itself (not one level above)
        if os.sep in short_path:
            path = short_path.split(os.sep, 1)[1]
        else:
            return False

        # Convert all path separators to POSIX style
        path = path.replace(os.sep, '/')

        # Do the matching on the simplified path
        for pattern in self.patterns:
            if pattern.match(path):
                return True
        return False

    # Identify the unnecessary portion of the relative path
    truncate = os.path.dirname(self.directory)
    # Traverse the filesystem downward from the target directory's path
    # Errors: `os.walk()` will simply return an empty generator if the
    #         target directory does not exist.
    wildcard_directories = set()
    for curr_dir, _, files in os.walk(self.directory):
        # find the path relative to the directory being added
        if len(truncate) > 0:
            _, _, short_path = curr_dir.partition(truncate)
        else:
            short_path = curr_dir
        # remove leading / or \ if it is present
        if short_path.startswith(os.sep):
            short_path = short_path[1:]

        wildcard_directory = False
        if os.path.split(short_path)[0] in wildcard_directories:
            # Parent directory has matched a pattern, all sub-nodes should
            # be added too
            wildcard_directories.add(short_path)
            wildcard_directory = True
        else:
            # Check if directory path matches one of the patterns
            if match_short_path(short_path):
                # Directory matched pattern and it should therefore
                # be added along with all of its contents
                wildcard_directories.add(short_path)
                wildcard_directory = True

        # Always add directories within wildcard directories - even if they
        # are empty
        if wildcard_directory:
            add_directory(short_path)

        # Iterate across the files in the current directory
        for filename in files:
            # Find the filename relative to the directory being added
            short_name = os.path.join(short_path, filename)
            filepath = os.path.join(curr_dir, filename)

            if wildcard_directory:
                # Always add files in wildcard directories
                add_file(short_name, filepath)
            else:
                # Add file (and all missing intermediary directories)
                # if it matches one of the patterns
                if match_short_path(short_name):
                    add_directory(short_path)
                    add_file(short_name, filepath)

    # Prepare (but do not send) the request so that its encoded body and
    # headers can be reused by the streaming machinery
    req = requests.Request("POST", 'http://localhost', files=names)
    prep = req.prepare()
    return prep
[ "def", "_prepare", "(", "self", ")", ":", "names", "=", "[", "]", "added_directories", "=", "set", "(", ")", "def", "add_directory", "(", "short_path", ")", ":", "# Do not continue if this directory has already been added", "if", "short_path", "in", "added_directories", ":", "return", "# Scan for first super-directory that has already been added", "dir_base", "=", "short_path", "dir_parts", "=", "[", "]", "while", "dir_base", ":", "dir_base", ",", "dir_name", "=", "os", ".", "path", ".", "split", "(", "dir_base", ")", "dir_parts", ".", "append", "(", "dir_name", ")", "if", "dir_base", "in", "added_directories", ":", "break", "# Add missing intermediate directory nodes in the right order", "while", "dir_parts", ":", "dir_base", "=", "os", ".", "path", ".", "join", "(", "dir_base", ",", "dir_parts", ".", "pop", "(", ")", ")", "# Create an empty, fake file to represent the directory", "mock_file", "=", "io", ".", "StringIO", "(", ")", "mock_file", ".", "write", "(", "u''", ")", "# Add this directory to those that will be sent", "names", ".", "append", "(", "(", "'files'", ",", "(", "dir_base", ".", "replace", "(", "os", ".", "sep", ",", "'/'", ")", ",", "mock_file", ",", "'application/x-directory'", ")", ")", ")", "# Remember that this directory has already been sent", "added_directories", ".", "add", "(", "dir_base", ")", "def", "add_file", "(", "short_path", ",", "full_path", ")", ":", "try", ":", "# Always add files in wildcard directories", "names", ".", "append", "(", "(", "'files'", ",", "(", "short_name", ".", "replace", "(", "os", ".", "sep", ",", "'/'", ")", ",", "open", "(", "full_path", ",", "'rb'", ")", ",", "'application/octet-stream'", ")", ")", ")", "except", "OSError", ":", "# File might have disappeared between `os.walk()` and `open()`", "pass", "def", "match_short_path", "(", "short_path", ")", ":", "# Remove initial path component so that all files are based in", "# the target directory itself (not one level above)", "if", "os", ".", "sep", "in", "short_path", ":", "path", "=", "short_path", ".", "split", "(", "os", ".", "sep", ",", "1", ")", "[", "1", "]", "else", ":", "return", "False", "# Convert all path seperators to POSIX style", "path", "=", "path", ".", "replace", "(", "os", ".", "sep", ",", "'/'", ")", "# Do the matching and the simplified path", "for", "pattern", "in", "self", ".", "patterns", ":", "if", "pattern", ".", "match", "(", "path", ")", ":", "return", "True", "return", "False", "# Identify the unecessary portion of the relative path", "truncate", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "directory", ")", "# Traverse the filesystem downward from the target directory's uri", "# Errors: `os.walk()` will simply return an empty generator if the", "# target directory does not exist.", "wildcard_directories", "=", "set", "(", ")", "for", "curr_dir", ",", "_", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "directory", ")", ":", "# find the path relative to the directory being added", "if", "len", "(", "truncate", ")", ">", "0", ":", "_", ",", "_", ",", "short_path", "=", "curr_dir", ".", "partition", "(", "truncate", ")", "else", ":", "short_path", "=", "curr_dir", "# remove leading / or \\ if it is present", "if", "short_path", ".", "startswith", "(", "os", ".", "sep", ")", ":", "short_path", "=", "short_path", "[", "1", ":", "]", "wildcard_directory", "=", "False", "if", "os", ".", "path", ".", "split", "(", "short_path", ")", "[", "0", "]", "in", "wildcard_directories", ":", "# Parent directory has matched a pattern, all 
sub-nodes should", "# be added too", "wildcard_directories", ".", "add", "(", "short_path", ")", "wildcard_directory", "=", "True", "else", ":", "# Check if directory path matches one of the patterns", "if", "match_short_path", "(", "short_path", ")", ":", "# Directory matched pattern and it should therefor", "# be added along with all of its contents", "wildcard_directories", ".", "add", "(", "short_path", ")", "wildcard_directory", "=", "True", "# Always add directories within wildcard directories - even if they", "# are empty", "if", "wildcard_directory", ":", "add_directory", "(", "short_path", ")", "# Iterate across the files in the current directory", "for", "filename", "in", "files", ":", "# Find the filename relative to the directory being added", "short_name", "=", "os", ".", "path", ".", "join", "(", "short_path", ",", "filename", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "curr_dir", ",", "filename", ")", "if", "wildcard_directory", ":", "# Always add files in wildcard directories", "add_file", "(", "short_name", ",", "filepath", ")", "else", ":", "# Add file (and all missing intermediary directories)", "# if it matches one of the patterns", "if", "match_short_path", "(", "short_name", ")", ":", "add_directory", "(", "short_path", ")", "add_file", "(", "short_name", ",", "filepath", ")", "# Send the request and present the response body to the user", "req", "=", "requests", ".", "Request", "(", "\"POST\"", ",", "'http://localhost'", ",", "files", "=", "names", ")", "prep", "=", "req", ".", "prepare", "(", ")", "return", "prep" ]
Pre-formats the multipart HTTP request to transmit the directory.
[ "Pre", "-", "formats", "the", "multipart", "HTTP", "request", "to", "transmit", "the", "directory", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L415-L528
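The heart of ``_prepare`` is the ``match_short_path`` helper: it drops the first path component and matches the POSIX-style remainder against ``self.patterns``. A standalone sketch (that the patterns are pre-compiled regular expressions is an assumption; here one is built from a glob via ``fnmatch``):

import fnmatch
import os
import re

patterns = [re.compile(fnmatch.translate("*.py"))]      # assumed pattern form

def match_short_path(short_path):
    # The top-level entry itself never matches
    if os.sep not in short_path:
        return False
    path = short_path.split(os.sep, 1)[1].replace(os.sep, "/")
    return any(pattern.match(path) for pattern in patterns)

assert match_short_path(os.path.join("project", "setup.py"))
assert not match_short_path("project")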
234,671
ipfs/py-ipfs-api
ipfsapi/multipart.py
BytesStream.body
def body(self): """Yields the encoded body.""" for chunk in self.gen_chunks(self.envelope.file_open(self.name)): yield chunk for chunk in self.gen_chunks(self.data): yield chunk for chunk in self.gen_chunks(self.envelope.file_close()): yield chunk for chunk in self.close(): yield chunk
python
def body(self): """Yields the encoded body.""" for chunk in self.gen_chunks(self.envelope.file_open(self.name)): yield chunk for chunk in self.gen_chunks(self.data): yield chunk for chunk in self.gen_chunks(self.envelope.file_close()): yield chunk for chunk in self.close(): yield chunk
[ "def", "body", "(", "self", ")", ":", "for", "chunk", "in", "self", ".", "gen_chunks", "(", "self", ".", "envelope", ".", "file_open", "(", "self", ".", "name", ")", ")", ":", "yield", "chunk", "for", "chunk", "in", "self", ".", "gen_chunks", "(", "self", ".", "data", ")", ":", "yield", "chunk", "for", "chunk", "in", "self", ".", "gen_chunks", "(", "self", ".", "envelope", ".", "file_close", "(", ")", ")", ":", "yield", "chunk", "for", "chunk", "in", "self", ".", "close", "(", ")", ":", "yield", "chunk" ]
Yields the encoded body.
[ "Yields", "the", "encoded", "body", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/multipart.py#L548-L557
234,672
ipfs/py-ipfs-api
ipfsapi/http.py
pass_defaults
def pass_defaults(func):
    """Decorator that returns a function named wrapper.

    When invoked, wrapper invokes func with default kwargs merged in;
    explicit keyword arguments override the defaults.

    Parameters
    ----------
    func : callable
        The function to append the default kwargs to
    """
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        merged = {}
        merged.update(self.defaults)
        merged.update(kwargs)
        return func(self, *args, **merged)
    return wrapper
python
def pass_defaults(func):
    """Decorator that returns a function named wrapper.

    When invoked, wrapper invokes func with default kwargs merged in;
    explicit keyword arguments override the defaults.

    Parameters
    ----------
    func : callable
        The function to append the default kwargs to
    """
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        merged = {}
        merged.update(self.defaults)
        merged.update(kwargs)
        return func(self, *args, **merged)
    return wrapper
[ "def", "pass_defaults", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "merged", "=", "{", "}", "merged", ".", "update", "(", "self", ".", "defaults", ")", "merged", ".", "update", "(", "kwargs", ")", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "merged", ")", "return", "wrapper" ]
Decorator that returns a function named wrapper.

When invoked, wrapper invokes func with default kwargs merged in;
explicit keyword arguments override the defaults.

Parameters
----------
func : callable
    The function to append the default kwargs to
[ "Decorator", "that", "returns", "a", "function", "named", "wrapper", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/http.py#L23-L39
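A usage sketch of the decorator (the class and its attribute values are hypothetical; the only requirement is a ``defaults`` dict on the instance). Because ``kwargs`` is merged in last, explicit keyword arguments override the defaults:

class Example(object):
    defaults = {"timeout": 30}

    @pass_defaults
    def fetch(self, url, **kwargs):
        return kwargs

assert Example().fetch("http://localhost") == {"timeout": 30}
assert Example().fetch("http://localhost", timeout=5) == {"timeout": 5}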
234,673
ipfs/py-ipfs-api
ipfsapi/http.py
HTTPClient.request
def request(self, path,
            args=[], files=[], opts={}, stream=False,
            decoder=None, headers={}, data=None):
    """Makes an HTTP request to the IPFS daemon.

    This function returns the contents of the HTTP response from the IPFS
    daemon.

    Raises
    ------
    ~ipfsapi.exceptions.ErrorResponse
    ~ipfsapi.exceptions.ConnectionError
    ~ipfsapi.exceptions.ProtocolError
    ~ipfsapi.exceptions.StatusError
    ~ipfsapi.exceptions.TimeoutError

    Parameters
    ----------
    path : str
        The REST command path to send
    args : list
        Positional parameters to be sent along with the HTTP request
    files : :class:`io.RawIOBase` | :obj:`str` | :obj:`list`
        The file object(s) or path(s) to stream to the daemon
    opts : dict
        Query string parameters to be sent along with the HTTP request
    stream : bool
        Whether to return the response data as an iterable stream of chunks
    decoder : str
        The decoder to use to parse the HTTP response
    headers : dict
        Custom HTTP headers to send along with the request
    data : bytes | generator
        Raw request body to upload to the daemon
    """
    url = self.base + path

    params = []
    params.append(('stream-channels', 'true'))
    for opt in opts.items():
        params.append(opt)
    for arg in args:
        params.append(('arg', arg))

    method = 'post' if (files or data) else 'get'
    parser = encoding.get_encoding(decoder if decoder else "none")

    return self._request(method, url, params, parser, stream,
                         files, headers, data)
python
def request(self, path,
            args=[], files=[], opts={}, stream=False,
            decoder=None, headers={}, data=None):
    """Makes an HTTP request to the IPFS daemon.

    This function returns the contents of the HTTP response from the IPFS
    daemon.

    Raises
    ------
    ~ipfsapi.exceptions.ErrorResponse
    ~ipfsapi.exceptions.ConnectionError
    ~ipfsapi.exceptions.ProtocolError
    ~ipfsapi.exceptions.StatusError
    ~ipfsapi.exceptions.TimeoutError

    Parameters
    ----------
    path : str
        The REST command path to send
    args : list
        Positional parameters to be sent along with the HTTP request
    files : :class:`io.RawIOBase` | :obj:`str` | :obj:`list`
        The file object(s) or path(s) to stream to the daemon
    opts : dict
        Query string parameters to be sent along with the HTTP request
    stream : bool
        Whether to return the response data as an iterable stream of chunks
    decoder : str
        The decoder to use to parse the HTTP response
    headers : dict
        Custom HTTP headers to send along with the request
    data : bytes | generator
        Raw request body to upload to the daemon
    """
    url = self.base + path

    params = []
    params.append(('stream-channels', 'true'))
    for opt in opts.items():
        params.append(opt)
    for arg in args:
        params.append(('arg', arg))

    method = 'post' if (files or data) else 'get'
    parser = encoding.get_encoding(decoder if decoder else "none")

    return self._request(method, url, params, parser, stream,
                         files, headers, data)
[ "def", "request", "(", "self", ",", "path", ",", "args", "=", "[", "]", ",", "files", "=", "[", "]", ",", "opts", "=", "{", "}", ",", "stream", "=", "False", ",", "decoder", "=", "None", ",", "headers", "=", "{", "}", ",", "data", "=", "None", ")", ":", "url", "=", "self", ".", "base", "+", "path", "params", "=", "[", "]", "params", ".", "append", "(", "(", "'stream-channels'", ",", "'true'", ")", ")", "for", "opt", "in", "opts", ".", "items", "(", ")", ":", "params", ".", "append", "(", "opt", ")", "for", "arg", "in", "args", ":", "params", ".", "append", "(", "(", "'arg'", ",", "arg", ")", ")", "method", "=", "'post'", "if", "(", "files", "or", "data", ")", "else", "'get'", "parser", "=", "encoding", ".", "get_encoding", "(", "decoder", "if", "decoder", "else", "\"none\"", ")", "return", "self", ".", "_request", "(", "method", ",", "url", ",", "params", ",", "parser", ",", "stream", ",", "files", ",", "headers", ",", "data", ")" ]
Makes an HTTP request to the IPFS daemon.

This function returns the contents of the HTTP response from the IPFS
daemon.

Raises
------
~ipfsapi.exceptions.ErrorResponse
~ipfsapi.exceptions.ConnectionError
~ipfsapi.exceptions.ProtocolError
~ipfsapi.exceptions.StatusError
~ipfsapi.exceptions.TimeoutError

Parameters
----------
path : str
    The REST command path to send
args : list
    Positional parameters to be sent along with the HTTP request
files : :class:`io.RawIOBase` | :obj:`str` | :obj:`list`
    The file object(s) or path(s) to stream to the daemon
opts : dict
    Query string parameters to be sent along with the HTTP request
stream : bool
    Whether to return the response data as an iterable stream of chunks
decoder : str
    The decoder to use to parse the HTTP response
headers : dict
    Custom HTTP headers to send along with the request
data : bytes | generator
    Raw request body to upload to the daemon
[ "Makes", "an", "HTTP", "request", "to", "the", "IPFS", "daemon", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/http.py#L202-L247
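A hypothetical direct call (the constructor arguments shown are assumptions; in normal use this method is reached through the high-level ``Client`` wrappers further below):

client = HTTPClient("localhost", 5001, "api/v0")        # assumed signature
version_info = client.request("/version", decoder="json")
pinned = client.request("/pin/ls", opts={"type": "recursive"}, decoder="json")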
234,674
ipfs/py-ipfs-api
ipfsapi/http.py
HTTPClient.download
def download(self, path, args=[], filepath=None, opts={},
             compress=True, **kwargs):
    """Makes a request to the IPFS daemon to download a file.

    Downloads a file or files from IPFS into the current working
    directory, or the directory given by ``filepath``.

    Raises
    ------
    ~ipfsapi.exceptions.ErrorResponse
    ~ipfsapi.exceptions.ConnectionError
    ~ipfsapi.exceptions.ProtocolError
    ~ipfsapi.exceptions.StatusError
    ~ipfsapi.exceptions.TimeoutError

    Parameters
    ----------
    path : str
        The REST command path to send
    filepath : str
        The local path where IPFS will store downloaded files.
        Defaults to the current working directory.
    args : list
        Positional parameters to be sent along with the HTTP request
    opts : dict
        Query string parameters to be sent along with the HTTP request
    compress : bool
        Whether the downloaded file should be GZip compressed by the
        daemon before being sent to the client
    kwargs : dict
        Additional arguments to pass to :mod:`requests`
    """
    url = self.base + path
    wd = filepath or '.'

    params = []
    params.append(('stream-channels', 'true'))
    params.append(('archive', 'true'))
    if compress:
        params.append(('compress', 'true'))

    for opt in opts.items():
        params.append(opt)
    for arg in args:
        params.append(('arg', arg))

    method = 'get'

    res = self._do_request(method, url, params=params, stream=True,
                           **kwargs)

    self._do_raise_for_status(res)

    # try to stream download as a tar file stream
    mode = 'r|gz' if compress else 'r|'

    with tarfile.open(fileobj=res.raw, mode=mode) as tf:
        tf.extractall(path=wd)
python
def download(self, path, args=[], filepath=None, opts={},
             compress=True, **kwargs):
    """Makes a request to the IPFS daemon to download a file.

    Downloads a file or files from IPFS into the current working
    directory, or the directory given by ``filepath``.

    Raises
    ------
    ~ipfsapi.exceptions.ErrorResponse
    ~ipfsapi.exceptions.ConnectionError
    ~ipfsapi.exceptions.ProtocolError
    ~ipfsapi.exceptions.StatusError
    ~ipfsapi.exceptions.TimeoutError

    Parameters
    ----------
    path : str
        The REST command path to send
    filepath : str
        The local path where IPFS will store downloaded files.
        Defaults to the current working directory.
    args : list
        Positional parameters to be sent along with the HTTP request
    opts : dict
        Query string parameters to be sent along with the HTTP request
    compress : bool
        Whether the downloaded file should be GZip compressed by the
        daemon before being sent to the client
    kwargs : dict
        Additional arguments to pass to :mod:`requests`
    """
    url = self.base + path
    wd = filepath or '.'

    params = []
    params.append(('stream-channels', 'true'))
    params.append(('archive', 'true'))
    if compress:
        params.append(('compress', 'true'))

    for opt in opts.items():
        params.append(opt)
    for arg in args:
        params.append(('arg', arg))

    method = 'get'

    res = self._do_request(method, url, params=params, stream=True,
                           **kwargs)

    self._do_raise_for_status(res)

    # try to stream download as a tar file stream
    mode = 'r|gz' if compress else 'r|'

    with tarfile.open(fileobj=res.raw, mode=mode) as tf:
        tf.extractall(path=wd)
[ "def", "download", "(", "self", ",", "path", ",", "args", "=", "[", "]", ",", "filepath", "=", "None", ",", "opts", "=", "{", "}", ",", "compress", "=", "True", ",", "*", "*", "kwargs", ")", ":", "url", "=", "self", ".", "base", "+", "path", "wd", "=", "filepath", "or", "'.'", "params", "=", "[", "]", "params", ".", "append", "(", "(", "'stream-channels'", ",", "'true'", ")", ")", "params", ".", "append", "(", "(", "'archive'", ",", "'true'", ")", ")", "if", "compress", ":", "params", ".", "append", "(", "(", "'compress'", ",", "'true'", ")", ")", "for", "opt", "in", "opts", ".", "items", "(", ")", ":", "params", ".", "append", "(", "opt", ")", "for", "arg", "in", "args", ":", "params", ".", "append", "(", "(", "'arg'", ",", "arg", ")", ")", "method", "=", "'get'", "res", "=", "self", ".", "_do_request", "(", "method", ",", "url", ",", "params", "=", "params", ",", "stream", "=", "True", ",", "*", "*", "kwargs", ")", "self", ".", "_do_raise_for_status", "(", "res", ")", "# try to stream download as a tar file stream", "mode", "=", "'r|gz'", "if", "compress", "else", "'r|'", "with", "tarfile", ".", "open", "(", "fileobj", "=", "res", ".", "raw", ",", "mode", "=", "mode", ")", "as", "tf", ":", "tf", ".", "extractall", "(", "path", "=", "wd", ")" ]
Makes a request to the IPFS daemon to download a file.

Downloads a file or files from IPFS into the current working
directory, or the directory given by ``filepath``.

Raises
------
~ipfsapi.exceptions.ErrorResponse
~ipfsapi.exceptions.ConnectionError
~ipfsapi.exceptions.ProtocolError
~ipfsapi.exceptions.StatusError
~ipfsapi.exceptions.TimeoutError

Parameters
----------
path : str
    The REST command path to send
filepath : str
    The local path where IPFS will store downloaded files.
    Defaults to the current working directory.
args : list
    Positional parameters to be sent along with the HTTP request
opts : dict
    Query string parameters to be sent along with the HTTP request
compress : bool
    Whether the downloaded file should be GZip compressed by the
    daemon before being sent to the client
kwargs : dict
    Additional arguments to pass to :mod:`requests`
[ "Makes", "a", "request", "to", "the", "IPFS", "daemon", "to", "download", "a", "file", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/http.py#L250-L308
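A hypothetical call mirroring what ``Client.get`` does, continuing with the ``client`` from the sketch above; the multihash is a placeholder, not a real object:

client.download("/get",
                args=["QmExampleHashForIllustrationOnly"],
                filepath="/tmp/ipfs-out",
                compress=True)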
234,675
ipfs/py-ipfs-api
ipfsapi/http.py
HTTPClient.session
@contextlib.contextmanager  # restored: the docstring and bare ``yield`` imply this wrapper (contextlib import assumed)
def session(self):
    """A context manager for this client's session.

    This function closes the current session when the context manager
    exits.
    """
    self._session = requests.session()
    yield
    self._session.close()
    self._session = None
python
@contextlib.contextmanager  # restored: the docstring and bare ``yield`` imply this wrapper (contextlib import assumed)
def session(self):
    """A context manager for this client's session.

    This function closes the current session when the context manager
    exits.
    """
    self._session = requests.session()
    yield
    self._session.close()
    self._session = None
[ "def", "session", "(", "self", ")", ":", "self", ".", "_session", "=", "requests", ".", "session", "(", ")", "yield", "self", ".", "_session", ".", "close", "(", ")", "self", ".", "_session", "=", "None" ]
A context manager for this client's session.

This function closes the current session when the context manager
exits.
[ "A", "context", "manager", "for", "this", "client", "s", "session", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/http.py#L311-L320
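Usage sketch: keeping one TCP session alive across several requests (again using the hypothetical ``client`` from above):

with client.session():
    client.request("/version", decoder="json")
    client.request("/id", decoder="json")
# the underlying requests.Session has been closed again at this point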
234,676
ipfs/py-ipfs-api
ipfsapi/client.py
assert_version
def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM):
    """Make sure that the given daemon version is supported by this client
    version.

    Raises
    ------
    ~ipfsapi.exceptions.VersionMismatch

    Parameters
    ----------
    version : str
        The version of an IPFS daemon.
    minimum : str
        The minimal IPFS version to allow (inclusive).
    maximum : str
        The maximum IPFS version to allow (exclusive).
    """
    # Convert version strings to lists of integers
    version = list(map(int, version.split('-', 1)[0].split('.')))
    minimum = list(map(int, minimum.split('-', 1)[0].split('.')))
    maximum = list(map(int, maximum.split('-', 1)[0].split('.')))

    if minimum > version or version >= maximum:
        raise exceptions.VersionMismatch(version, minimum, maximum)
python
def assert_version(version, minimum=VERSION_MINIMUM, maximum=VERSION_MAXIMUM):
    """Make sure that the given daemon version is supported by this client
    version.

    Raises
    ------
    ~ipfsapi.exceptions.VersionMismatch

    Parameters
    ----------
    version : str
        The version of an IPFS daemon.
    minimum : str
        The minimal IPFS version to allow (inclusive).
    maximum : str
        The maximum IPFS version to allow (exclusive).
    """
    # Convert version strings to lists of integers
    version = list(map(int, version.split('-', 1)[0].split('.')))
    minimum = list(map(int, minimum.split('-', 1)[0].split('.')))
    maximum = list(map(int, maximum.split('-', 1)[0].split('.')))

    if minimum > version or version >= maximum:
        raise exceptions.VersionMismatch(version, minimum, maximum)
[ "def", "assert_version", "(", "version", ",", "minimum", "=", "VERSION_MINIMUM", ",", "maximum", "=", "VERSION_MAXIMUM", ")", ":", "# Convert version strings to integer tuples", "version", "=", "list", "(", "map", "(", "int", ",", "version", ".", "split", "(", "'-'", ",", "1", ")", "[", "0", "]", ".", "split", "(", "'.'", ")", ")", ")", "minimum", "=", "list", "(", "map", "(", "int", ",", "minimum", ".", "split", "(", "'-'", ",", "1", ")", "[", "0", "]", ".", "split", "(", "'.'", ")", ")", ")", "maximum", "=", "list", "(", "map", "(", "int", ",", "maximum", ".", "split", "(", "'-'", ",", "1", ")", "[", "0", "]", ".", "split", "(", "'.'", ")", ")", ")", "if", "minimum", ">", "version", "or", "version", ">=", "maximum", ":", "raise", "exceptions", ".", "VersionMismatch", "(", "version", ",", "minimum", ",", "maximum", ")" ]
Make sure that the given daemon version is supported by this client
version.

Raises
------
~ipfsapi.exceptions.VersionMismatch

Parameters
----------
version : str
    The version of an IPFS daemon.
minimum : str
    The minimal IPFS version to allow (inclusive).
maximum : str
    The maximum IPFS version to allow (exclusive).
[ "Make", "sure", "that", "the", "given", "daemon", "version", "is", "supported", "by", "this", "client", "version", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L23-L46
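Since the check is ``minimum <= version < maximum`` on lists of integers, the window is inclusive at the bottom and exclusive at the top; illustrative calls with explicit bounds (the shipped VERSION_MINIMUM/VERSION_MAXIMUM values are not restated here):

assert_version("0.4.10", minimum="0.4.3", maximum="0.5.0")     # inside the window
assert_version("0.4.3-dev", minimum="0.4.3", maximum="0.5.0")  # pre-release tag is stripped

try:
    assert_version("0.5.0", minimum="0.4.3", maximum="0.5.0")  # exclusive upper bound
except exceptions.VersionMismatch:
    pass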
234,677
ipfs/py-ipfs-api
ipfsapi/client.py
Client.add
def add(self, files, recursive=False, pattern='**', *args, **kwargs):
    """Add a file, or directory of files to IPFS.

    .. code-block:: python

        >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f:
        ...     numbytes = f.write('Mary had a little lamb')
        >>> c.add('nurseryrhyme.txt')
        {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab',
         'Name': 'nurseryrhyme.txt'}

    Parameters
    ----------
    files : str
        A filepath to either a file or directory
    recursive : bool
        Controls if files in subdirectories are added or not
    pattern : str | list
        Single `*glob* <https://docs.python.org/3/library/glob.html>`_
        pattern or list of *glob* patterns and compiled regular expressions
        to match the names of the filepaths to keep
    trickle : bool
        Use trickle-dag format (optimized for streaming) when generating
        the dag; see `the FAQ <https://github.com/ipfs/faq/issues/218>`_ for
        more information (Default: ``False``)
    only_hash : bool
        Only chunk and hash, but do not write to disk (Default: ``False``)
    wrap_with_directory : bool
        Wrap files with a directory object to preserve their filename
        (Default: ``False``)
    chunker : str
        The chunking algorithm to use
    pin : bool
        Pin this object when adding (Default: ``True``)

    Returns
    -------
        dict: File name and hash of the added file node
    """
    #PY2: No support for kw-only parameters after glob parameters
    opts = {
        "trickle": kwargs.pop("trickle", False),
        "only-hash": kwargs.pop("only_hash", False),
        "wrap-with-directory": kwargs.pop("wrap_with_directory", False),
        "pin": kwargs.pop("pin", True)
    }
    if "chunker" in kwargs:
        opts["chunker"] = kwargs.pop("chunker")
    kwargs.setdefault("opts", opts)

    body, headers = multipart.stream_filesystem_node(
        files, recursive, pattern, self.chunk_size
    )
    return self._client.request('/add', decoder='json',
                                data=body, headers=headers, **kwargs)
python
def add(self, files, recursive=False, pattern='**', *args, **kwargs):
    """Add a file, or directory of files to IPFS.

    .. code-block:: python

        >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f:
        ...     numbytes = f.write('Mary had a little lamb')
        >>> c.add('nurseryrhyme.txt')
        {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab',
         'Name': 'nurseryrhyme.txt'}

    Parameters
    ----------
    files : str
        A filepath to either a file or directory
    recursive : bool
        Controls if files in subdirectories are added or not
    pattern : str | list
        Single `*glob* <https://docs.python.org/3/library/glob.html>`_
        pattern or list of *glob* patterns and compiled regular expressions
        to match the names of the filepaths to keep
    trickle : bool
        Use trickle-dag format (optimized for streaming) when generating
        the dag; see `the FAQ <https://github.com/ipfs/faq/issues/218>`_ for
        more information (Default: ``False``)
    only_hash : bool
        Only chunk and hash, but do not write to disk (Default: ``False``)
    wrap_with_directory : bool
        Wrap files with a directory object to preserve their filename
        (Default: ``False``)
    chunker : str
        The chunking algorithm to use
    pin : bool
        Pin this object when adding (Default: ``True``)

    Returns
    -------
        dict: File name and hash of the added file node
    """
    #PY2: No support for kw-only parameters after glob parameters
    opts = {
        "trickle": kwargs.pop("trickle", False),
        "only-hash": kwargs.pop("only_hash", False),
        "wrap-with-directory": kwargs.pop("wrap_with_directory", False),
        "pin": kwargs.pop("pin", True)
    }
    if "chunker" in kwargs:
        opts["chunker"] = kwargs.pop("chunker")
    kwargs.setdefault("opts", opts)

    body, headers = multipart.stream_filesystem_node(
        files, recursive, pattern, self.chunk_size
    )
    return self._client.request('/add', decoder='json',
                                data=body, headers=headers, **kwargs)
[ "def", "add", "(", "self", ",", "files", ",", "recursive", "=", "False", ",", "pattern", "=", "'**'", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "#PY2: No support for kw-only parameters after glob parameters", "opts", "=", "{", "\"trickle\"", ":", "kwargs", ".", "pop", "(", "\"trickle\"", ",", "False", ")", ",", "\"only-hash\"", ":", "kwargs", ".", "pop", "(", "\"only_hash\"", ",", "False", ")", ",", "\"wrap-with-directory\"", ":", "kwargs", ".", "pop", "(", "\"wrap_with_directory\"", ",", "False", ")", ",", "\"pin\"", ":", "kwargs", ".", "pop", "(", "\"pin\"", ",", "True", ")", "}", "if", "\"chunker\"", "in", "kwargs", ":", "opts", "[", "\"chunker\"", "]", "=", "kwargs", ".", "pop", "(", "\"chunker\"", ")", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "opts", ")", "body", ",", "headers", "=", "multipart", ".", "stream_filesystem_node", "(", "files", ",", "recursive", ",", "pattern", ",", "self", ".", "chunk_size", ")", "return", "self", ".", "_client", ".", "request", "(", "'/add'", ",", "decoder", "=", "'json'", ",", "data", "=", "body", ",", "headers", "=", "headers", ",", "*", "*", "kwargs", ")" ]
Add a file, or directory of files to IPFS.

.. code-block:: python

    >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f:
    ...     numbytes = f.write('Mary had a little lamb')
    >>> c.add('nurseryrhyme.txt')
    {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab',
     'Name': 'nurseryrhyme.txt'}

Parameters
----------
files : str
    A filepath to either a file or directory
recursive : bool
    Controls if files in subdirectories are added or not
pattern : str | list
    Single `*glob* <https://docs.python.org/3/library/glob.html>`_
    pattern or list of *glob* patterns and compiled regular expressions
    to match the names of the filepaths to keep
trickle : bool
    Use trickle-dag format (optimized for streaming) when generating
    the dag; see `the FAQ <https://github.com/ipfs/faq/issues/218>`_ for
    more information (Default: ``False``)
only_hash : bool
    Only chunk and hash, but do not write to disk (Default: ``False``)
wrap_with_directory : bool
    Wrap files with a directory object to preserve their filename
    (Default: ``False``)
chunker : str
    The chunking algorithm to use
pin : bool
    Pin this object when adding (Default: ``True``)

Returns
-------
    dict: File name and hash of the added file node
[ "Add", "a", "file", "or", "directory", "of", "files", "to", "IPFS", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L135-L189
234,678
ipfs/py-ipfs-api
ipfsapi/client.py
Client.get
def get(self, multihash, **kwargs):
    """Downloads a file, or directory of files from IPFS.

    Files are placed in the current working directory (or the directory
    given by the ``filepath`` keyword argument).

    Parameters
    ----------
    multihash : str
        The path to the IPFS object(s) to be downloaded
    """
    args = (multihash,)
    return self._client.download('/get', args, **kwargs)
python
def get(self, multihash, **kwargs):
    """Downloads a file, or directory of files from IPFS.

    Files are placed in the current working directory (or the directory
    given by the ``filepath`` keyword argument).

    Parameters
    ----------
    multihash : str
        The path to the IPFS object(s) to be downloaded
    """
    args = (multihash,)
    return self._client.download('/get', args, **kwargs)
[ "def", "get", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "download", "(", "'/get'", ",", "args", ",", "*", "*", "kwargs", ")" ]
Downloads a file, or directory of files from IPFS.

Files are placed in the current working directory (or the directory
given by the ``filepath`` keyword argument).

Parameters
----------
multihash : str
    The path to the IPFS object(s) to be downloaded
[ "Downloads", "a", "file", "or", "directory", "of", "files", "from", "IPFS", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L191-L202
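Usage sketch (connection parameters are assumptions; the hash is borrowed from the surrounding docstring examples):

import ipfsapi

c = ipfsapi.connect("127.0.0.1", 5001)
c.get("QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D", filepath="downloads")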
234,679
ipfs/py-ipfs-api
ipfsapi/client.py
Client.cat
def cat(self, multihash, offset=0, length=-1, **kwargs):
    r"""Retrieves the contents of a file identified by hash.

    .. code-block:: python

        >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        Traceback (most recent call last):
          ...
        ipfsapi.exceptions.Error: this dag node is a directory
        >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX')
        b'<!DOCTYPE html>\n<html>\n\n<head>\n<title>ipfs example viewer</…'

    Parameters
    ----------
    multihash : str
        The path to the IPFS object(s) to be retrieved
    offset : int
        Byte offset to begin reading from
    length : int
        Maximum number of bytes to read (-1 for all)

    Returns
    -------
        str : File contents
    """
    opts = {}
    if offset != 0:
        opts['offset'] = offset
    if length != -1:
        opts['length'] = length
    args = (multihash,)
    return self._client.request('/cat', args, opts=opts, **kwargs)
python
def cat(self, multihash, offset=0, length=-1, **kwargs):
    r"""Retrieves the contents of a file identified by hash.

    .. code-block:: python

        >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        Traceback (most recent call last):
          ...
        ipfsapi.exceptions.Error: this dag node is a directory
        >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX')
        b'<!DOCTYPE html>\n<html>\n\n<head>\n<title>ipfs example viewer</…'

    Parameters
    ----------
    multihash : str
        The path to the IPFS object(s) to be retrieved
    offset : int
        Byte offset to begin reading from
    length : int
        Maximum number of bytes to read (-1 for all)

    Returns
    -------
        str : File contents
    """
    opts = {}
    if offset != 0:
        opts['offset'] = offset
    if length != -1:
        opts['length'] = length
    args = (multihash,)
    return self._client.request('/cat', args, opts=opts, **kwargs)
[ "def", "cat", "(", "self", ",", "multihash", ",", "offset", "=", "0", ",", "length", "=", "-", "1", ",", "*", "*", "kwargs", ")", ":", "opts", "=", "{", "}", "if", "offset", "!=", "0", ":", "opts", "[", "'offset'", "]", "=", "offset", "if", "length", "!=", "-", "1", ":", "opts", "[", "'length'", "]", "=", "length", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/cat'", ",", "args", ",", "opts", "=", "opts", ",", "*", "*", "kwargs", ")" ]
r"""Retrieves the contents of a file identified by hash. .. code-block:: python >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'<!DOCTYPE html>\n<html>\n\n<head>\n<title>ipfs example viewer</…' Parameters ---------- multihash : str The path to the IPFS object(s) to be retrieved offset : int Byte offset to begin reading from length : int Maximum number of bytes to read(-1 for all) Returns ------- str : File contents
[ "r", "Retrieves", "the", "contents", "of", "a", "file", "identified", "by", "hash", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L204-L235
234,680
ipfs/py-ipfs-api
ipfsapi/client.py
Client.ls
def ls(self, multihash, **kwargs): """Returns a list of objects linked to by the given hash. .. code-block:: python >>> c.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Objects': [ {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174, 'Type': 2}, … {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55, 'Type': 2} ]} ]} Parameters ---------- multihash : str The path to the IPFS object(s) to list links from Returns ------- dict : Directory information and contents """ args = (multihash,) return self._client.request('/ls', args, decoder='json', **kwargs)
python
def ls(self, multihash, **kwargs): """Returns a list of objects linked to by the given hash. .. code-block:: python >>> c.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Objects': [ {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174, 'Type': 2}, … {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55, 'Type': 2} ]} ]} Parameters ---------- multihash : str The path to the IPFS object(s) to list links from Returns ------- dict : Directory information and contents """ args = (multihash,) return self._client.request('/ls', args, decoder='json', **kwargs)
[ "def", "ls", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/ls'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Returns a list of objects linked to by the given hash. .. code-block:: python >>> c.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Objects': [ {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174, 'Type': 2}, … {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55, 'Type': 2} ]} ]} Parameters ---------- multihash : str The path to the IPFS object(s) to list links from Returns ------- dict : Directory information and contents
[ "Returns", "a", "list", "of", "objects", "linked", "to", "by", "the", "given", "hash", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L237-L264
234,681
ipfs/py-ipfs-api
ipfsapi/client.py
Client.refs
def refs(self, multihash, **kwargs): """Returns a list of hashes of objects referenced by the given hash. .. code-block:: python >>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Parameters ---------- multihash : str Path to the object(s) to list refs from Returns ------- list """ args = (multihash,) return self._client.request('/refs', args, decoder='json', **kwargs)
python
def refs(self, multihash, **kwargs): """Returns a list of hashes of objects referenced by the given hash. .. code-block:: python >>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Parameters ---------- multihash : str Path to the object(s) to list refs from Returns ------- list """ args = (multihash,) return self._client.request('/refs', args, decoder='json', **kwargs)
[ "def", "refs", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/refs'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Returns a list of hashes of objects referenced by the given hash. .. code-block:: python >>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Parameters ---------- multihash : str Path to the object(s) to list refs from Returns ------- list
[ "Returns", "a", "list", "of", "hashes", "of", "objects", "referenced", "by", "the", "given", "hash", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L266-L286
234,682
ipfs/py-ipfs-api
ipfsapi/client.py
Client.block_stat
def block_stat(self, multihash, **kwargs): """Returns a dict with the size of the block with the given hash. .. code-block:: python >>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 258} Parameters ---------- multihash : str The base58 multihash of an existing block to stat Returns ------- dict : Information about the requested block """ args = (multihash,) return self._client.request('/block/stat', args, decoder='json', **kwargs)
python
def block_stat(self, multihash, **kwargs): """Returns a dict with the size of the block with the given hash. .. code-block:: python >>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 258} Parameters ---------- multihash : str The base58 multihash of an existing block to stat Returns ------- dict : Information about the requested block """ args = (multihash,) return self._client.request('/block/stat', args, decoder='json', **kwargs)
[ "def", "block_stat", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/block/stat'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Returns a dict with the size of the block with the given hash. .. code-block:: python >>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 258} Parameters ---------- multihash : str The base58 multihash of an existing block to stat Returns ------- dict : Information about the requested block
[ "Returns", "a", "dict", "with", "the", "size", "of", "the", "block", "with", "the", "given", "hash", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L304-L324
234,683
ipfs/py-ipfs-api
ipfsapi/client.py
Client.block_get
def block_get(self, multihash, **kwargs): r"""Returns the raw contents of a block. .. code-block:: python >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- multihash : str The base58 multihash of an existing block to get Returns ------- str : Value of the requested block """ args = (multihash,) return self._client.request('/block/get', args, **kwargs)
python
def block_get(self, multihash, **kwargs): r"""Returns the raw contents of a block. .. code-block:: python >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- multihash : str The base58 multihash of an existing block to get Returns ------- str : Value of the requested block """ args = (multihash,) return self._client.request('/block/get', args, **kwargs)
[ "def", "block_get", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/block/get'", ",", "args", ",", "*", "*", "kwargs", ")" ]
r"""Returns the raw contents of a block. .. code-block:: python >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- multihash : str The base58 multihash of an existing block to get Returns ------- str : Value of the requested block
[ "r", "Returns", "the", "raw", "contents", "of", "a", "block", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L326-L344
234,684
ipfs/py-ipfs-api
ipfsapi/client.py
Client.bitswap_wantlist
def bitswap_wantlist(self, peer=None, **kwargs): """Returns blocks currently on the bitswap wantlist. .. code-block:: python >>> c.bitswap_wantlist() {'Keys': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ]} Parameters ---------- peer : str Peer to show wantlist for. Returns ------- dict : List of wanted blocks """ args = (peer,) return self._client.request('/bitswap/wantlist', args, decoder='json', **kwargs)
python
def bitswap_wantlist(self, peer=None, **kwargs): """Returns blocks currently on the bitswap wantlist. .. code-block:: python >>> c.bitswap_wantlist() {'Keys': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ]} Parameters ---------- peer : str Peer to show wantlist for. Returns ------- dict : List of wanted blocks """ args = (peer,) return self._client.request('/bitswap/wantlist', args, decoder='json', **kwargs)
[ "def", "bitswap_wantlist", "(", "self", ",", "peer", "=", "None", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "peer", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/bitswap/wantlist'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Returns blocks currently on the bitswap wantlist. .. code-block:: python >>> c.bitswap_wantlist() {'Keys': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ]} Parameters ---------- peer : str Peer to show wantlist for. Returns ------- dict : List of wanted blocks
[ "Returns", "blocks", "currently", "on", "the", "bitswap", "wantlist", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L370-L393
234,685
ipfs/py-ipfs-api
ipfsapi/client.py
Client.bitswap_unwant
def bitswap_unwant(self, key, **kwargs): """ Remove a given block from wantlist. Parameters ---------- key : str Key to remove from wantlist. """ args = (key,) return self._client.request('/bitswap/unwant', args, **kwargs)
python
def bitswap_unwant(self, key, **kwargs): """ Remove a given block from wantlist. Parameters ---------- key : str Key to remove from wantlist. """ args = (key,) return self._client.request('/bitswap/unwant', args, **kwargs)
[ "def", "bitswap_unwant", "(", "self", ",", "key", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "key", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/bitswap/unwant'", ",", "args", ",", "*", "*", "kwargs", ")" ]
Remove a given block from wantlist. Parameters ---------- key : str Key to remove from wantlist.
[ "Remove", "a", "given", "block", "from", "wantlist", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L424-L434
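The two bitswap calls above combine naturally: list what the node is still waiting on, then drop the keys it should stop requesting. A sketch under the same local-daemon assumption; cancelling every outstanding request, as done here, is illustrative (e.g. when tearing down a fetch job):

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)

    # Keys this node is still trying to fetch from its peers.
    wanted = c.bitswap_wantlist().get('Keys') or []

    # Stop requesting all of them, e.g. before abandoning a download.
    for key in wanted:
        c.bitswap_unwant(key)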
234,686
ipfs/py-ipfs-api
ipfsapi/client.py
Client.object_data
def object_data(self, multihash, **kwargs): r"""Returns the raw bytes in an IPFS object. .. code-block:: python >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- str : Raw object data """ args = (multihash,) return self._client.request('/object/data', args, **kwargs)
python
def object_data(self, multihash, **kwargs): r"""Returns the raw bytes in an IPFS object. .. code-block:: python >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- str : Raw object data """ args = (multihash,) return self._client.request('/object/data', args, **kwargs)
[ "def", "object_data", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/object/data'", ",", "args", ",", "*", "*", "kwargs", ")" ]
r"""Returns the raw bytes in an IPFS object. .. code-block:: python >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- str : Raw object data
[ "r", "Returns", "the", "raw", "bytes", "in", "an", "IPFS", "object", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L436-L454
234,687
ipfs/py-ipfs-api
ipfsapi/client.py
Client.object_new
def object_new(self, template=None, **kwargs): """Creates a new object from an IPFS template. By default this creates and returns a new empty merkledag node, but you may pass an optional template argument to create a preformatted node. .. code-block:: python >>> c.object_new() {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} Parameters ---------- template : str Blueprints from which to construct the new object. Possible values: * ``"unixfs-dir"`` * ``None`` Returns ------- dict : Object hash """ args = (template,) if template is not None else () return self._client.request('/object/new', args, decoder='json', **kwargs)
python
def object_new(self, template=None, **kwargs): """Creates a new object from an IPFS template. By default this creates and returns a new empty merkledag node, but you may pass an optional template argument to create a preformatted node. .. code-block:: python >>> c.object_new() {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} Parameters ---------- template : str Blueprints from which to construct the new object. Possible values: * ``"unixfs-dir"`` * ``None`` Returns ------- dict : Object hash """ args = (template,) if template is not None else () return self._client.request('/object/new', args, decoder='json', **kwargs)
[ "def", "object_new", "(", "self", ",", "template", "=", "None", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "template", ",", ")", "if", "template", "is", "not", "None", "else", "(", ")", "return", "self", ".", "_client", ".", "request", "(", "'/object/new'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Creates a new object from an IPFS template. By default this creates and returns a new empty merkledag node, but you may pass an optional template argument to create a preformatted node. .. code-block:: python >>> c.object_new() {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} Parameters ---------- template : str Blueprints from which to construct the new object. Possible values: * ``"unixfs-dir"`` * ``None`` Returns ------- dict : Object hash
[ "Creates", "a", "new", "object", "from", "an", "IPFS", "template", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L456-L481
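A short sketch of both template variants accepted by ``object_new``, assuming a local daemon; the created nodes are deterministic, so the hashes are stable across nodes:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)

    # An empty merkledag node -- deterministic, so always the same hash.
    empty = c.object_new()

    # A preformatted, empty unixfs directory node.
    empty_dir = c.object_new('unixfs-dir')

    print(empty['Hash'], empty_dir['Hash'])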
234,688
ipfs/py-ipfs-api
ipfsapi/client.py
Client.object_links
def object_links(self, multihash, **kwargs): """Returns the links pointed to by the specified object. .. code-block:: python >>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object hash and merkledag links """ args = (multihash,) return self._client.request('/object/links', args, decoder='json', **kwargs)
python
def object_links(self, multihash, **kwargs): """Returns the links pointed to by the specified object. .. code-block:: python >>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object hash and merkledag links """ args = (multihash,) return self._client.request('/object/links', args, decoder='json', **kwargs)
[ "def", "object_links", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/object/links'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Returns the links pointed to by the specified object. .. code-block:: python >>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object hash and merkledag links
[ "Returns", "the", "links", "pointed", "to", "by", "the", "specified", "object", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L483-L513
234,689
ipfs/py-ipfs-api
ipfsapi/client.py
Client.object_get
def object_get(self, multihash, **kwargs): """Get and serialize the DAG node named by multihash. .. code-block:: python >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object data and links """ args = (multihash,) return self._client.request('/object/get', args, decoder='json', **kwargs)
python
def object_get(self, multihash, **kwargs): """Get and serialize the DAG node named by multihash. .. code-block:: python >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object data and links """ args = (multihash,) return self._client.request('/object/get', args, decoder='json', **kwargs)
[ "def", "object_get", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/object/get'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Get and serialize the DAG node named by multihash. .. code-block:: python >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict : Object data and links
[ "Get", "and", "serialize", "the", "DAG", "node", "named", "by", "multihash", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L515-L545
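``object_data``, ``object_links`` and ``object_get`` are three views of the same node, and ``object_get`` returns payload and links in a single call. A sketch that prints one level of the DAG, assuming a local daemon and the directory hash used in the docstrings:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)
    root = 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'

    node = c.object_get(root)
    print('payload:', repr(node['Data']))
    for link in node.get('Links', []):
        print(link['Name'], '->', link['Hash'], '(%d bytes)' % link['Size'])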
234,690
ipfs/py-ipfs-api
ipfsapi/client.py
Client.object_put
def object_put(self, file, **kwargs): """Stores input as a DAG object and returns its key. .. code-block:: python >>> c.object_put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', 'Links': [ {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', 'Size': 8, 'Name': 'some link'} ] } Parameters ---------- file : io.RawIOBase (JSON) object from which the DAG object will be created Returns ------- dict : Hash and links of the created DAG object See :meth:`~ipfsapi.Client.object_links` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/object/put', decoder='json', data=body, headers=headers, **kwargs)
python
def object_put(self, file, **kwargs): """Stores input as a DAG object and returns its key. .. code-block:: python >>> c.object_put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', 'Links': [ {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', 'Size': 8, 'Name': 'some link'} ] } Parameters ---------- file : io.RawIOBase (JSON) object from which the DAG object will be created Returns ------- dict : Hash and links of the created DAG object See :meth:`~ipfsapi.Client.object_links` """ body, headers = multipart.stream_files(file, self.chunk_size) return self._client.request('/object/put', decoder='json', data=body, headers=headers, **kwargs)
[ "def", "object_put", "(", "self", ",", "file", ",", "*", "*", "kwargs", ")", ":", "body", ",", "headers", "=", "multipart", ".", "stream_files", "(", "file", ",", "self", ".", "chunk_size", ")", "return", "self", ".", "_client", ".", "request", "(", "'/object/put'", ",", "decoder", "=", "'json'", ",", "data", "=", "body", ",", "headers", "=", "headers", ",", "*", "*", "kwargs", ")" ]
Stores input as a DAG object and returns its key. .. code-block:: python >>> c.object_put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', 'Links': [ {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', 'Size': 8, 'Name': 'some link'} ] } Parameters ---------- file : io.RawIOBase (JSON) object from which the DAG object will be created Returns ------- dict : Hash and links of the created DAG object See :meth:`~ipfsapi.Client.object_links`
[ "Stores", "input", "as", "a", "DAG", "object", "and", "returns", "its", "key", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L547-L581
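``object_put`` pairs with ``object_data`` for a simple round trip: store a JSON-described node, then read its payload back. A sketch, assuming a local daemon; note that on Python 3 the payload comes back as bytes:

.. code-block:: python

    import io
    import json

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)

    # Describe a DAG node as JSON and store it.
    node = {'Data': 'hello dag', 'Links': []}
    stored = c.object_put(io.BytesIO(json.dumps(node).encode('utf-8')))

    # Reading the payload back should round-trip the Data field.
    print(c.object_data(stored['Hash']))  # expected: b'hello dag'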
234,691
ipfs/py-ipfs-api
ipfsapi/client.py
Client.object_stat
def object_stat(self, multihash, **kwargs): """Get stats for the DAG node named by multihash. .. code-block:: python >>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'LinksSize': 256, 'NumLinks': 5, 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict """ args = (multihash,) return self._client.request('/object/stat', args, decoder='json', **kwargs)
python
def object_stat(self, multihash, **kwargs): """Get stats for the DAG node named by multihash. .. code-block:: python >>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'LinksSize': 256, 'NumLinks': 5, 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict """ args = (multihash,) return self._client.request('/object/stat', args, decoder='json', **kwargs)
[ "def", "object_stat", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/object/stat'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Get stats for the DAG node named by multihash. .. code-block:: python >>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'LinksSize': 256, 'NumLinks': 5, 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- dict
[ "Get", "stats", "for", "the", "DAG", "node", "named", "by", "multihash", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L583-L604
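The useful distinction in ``object_stat`` is between the size fields: ``BlockSize`` covers this node alone, while ``CumulativeSize`` includes every block reachable through its links. A sketch under the same local-daemon assumption:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)
    multihash = 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'

    stats = c.object_stat(multihash)
    subtree = stats['CumulativeSize'] - stats['BlockSize']
    print('node: %d bytes, linked subtree: %d bytes'
          % (stats['BlockSize'], subtree))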
234,692
ipfs/py-ipfs-api
ipfsapi/client.py
Client.file_ls
def file_ls(self, multihash, **kwargs): """Lists directory contents for Unix filesystem objects. The result contains size information. For files, the child size is the total size of the file contents. For directories, the child size is the IPFS link size. The path can be a prefixless reference; in this case, it is assumed that it is an ``/ipfs/`` reference and not ``/ipns/``. .. code-block:: python >>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'}, 'Objects': { 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': { 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 0, 'Type': 'Directory', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 163, 'Type': 'File'}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1463, 'Type': 'File'}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947, 'Type': 'Directory'}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261, 'Type': 'Directory'}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 47, 'Type': 'File'} ] } }} Parameters ---------- multihash : str The path to the object(s) to list links from Returns ------- dict """ args = (multihash,) return self._client.request('/file/ls', args, decoder='json', **kwargs)
python
def file_ls(self, multihash, **kwargs): """Lists directory contents for Unix filesystem objects. The result contains size information. For files, the child size is the total size of the file contents. For directories, the child size is the IPFS link size. The path can be a prefixless reference; in this case, it is assumed that it is an ``/ipfs/`` reference and not ``/ipns/``. .. code-block:: python >>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'}, 'Objects': { 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': { 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 0, 'Type': 'Directory', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 163, 'Type': 'File'}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1463, 'Type': 'File'}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947, 'Type': 'Directory'}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261, 'Type': 'Directory'}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 47, 'Type': 'File'} ] } }} Parameters ---------- multihash : str The path to the object(s) to list links from Returns ------- dict """ args = (multihash,) return self._client.request('/file/ls', args, decoder='json', **kwargs)
[ "def", "file_ls", "(", "self", ",", "multihash", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "multihash", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/file/ls'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Lists directory contents for Unix filesystem objects. The result contains size information. For files, the child size is the total size of the file contents. For directories, the child size is the IPFS link size. The path can be a prefixless reference; in this case, it is assumed that it is an ``/ipfs/`` reference and not ``/ipns/``. .. code-block:: python >>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'}, 'Objects': { 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': { 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 0, 'Type': 'Directory', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 163, 'Type': 'File'}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1463, 'Type': 'File'}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947, 'Type': 'Directory'}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261, 'Type': 'Directory'}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 47, 'Type': 'File'} ] } }} Parameters ---------- multihash : str The path to the object(s) to list links from Returns ------- dict
[ "Lists", "directory", "contents", "for", "Unix", "filesystem", "objects", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L728-L773
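Because ``file_ls`` reports a unixfs ``Type`` for every child, splitting a listing into files and directories is a simple filter. A sketch, assuming a local daemon and the example directory hash:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)
    root = 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'

    listing = c.file_ls(root)['Objects'][root]
    files = [l for l in listing['Links'] if l['Type'] == 'File']
    dirs = [l for l in listing['Links'] if l['Type'] == 'Directory']
    print('%d files, %d directories' % (len(files), len(dirs)))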
234,693
ipfs/py-ipfs-api
ipfsapi/client.py
Client.resolve
def resolve(self, name, recursive=False, **kwargs): """Accepts an identifier and resolves it to the referenced item. There are a number of mutable name protocols that can link among themselves and into IPNS. For example IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers. .. code-block:: python >>> c.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} >>> c.resolve("/ipns/ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- name : str The name to resolve recursive : bool Resolve until the result is an IPFS name Returns ------- dict : IPFS path of resource """ kwargs.setdefault("opts", {"recursive": recursive}) args = (name,) return self._client.request('/resolve', args, decoder='json', **kwargs)
python
def resolve(self, name, recursive=False, **kwargs): """Accepts an identifier and resolves it to the referenced item. There are a number of mutable name protocols that can link among themselves and into IPNS. For example IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers. .. code-block:: python >>> c.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} >>> c.resolve("/ipns/ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- name : str The name to resolve recursive : bool Resolve until the result is an IPFS name Returns ------- dict : IPFS path of resource """ kwargs.setdefault("opts", {"recursive": recursive}) args = (name,) return self._client.request('/resolve', args, decoder='json', **kwargs)
[ "def", "resolve", "(", "self", ",", "name", ",", "recursive", "=", "False", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "{", "\"recursive\"", ":", "recursive", "}", ")", "args", "=", "(", "name", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/resolve'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Accepts an identifier and resolves it to the referenced item. There are a number of mutable name protocols that can link among themselves and into IPNS. For example IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers. .. code-block:: python >>> c.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} >>> c.resolve("/ipns/ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- name : str The name to resolve recursive : bool Resolve until the result is an IPFS name Returns ------- dict : IPFS path of resource
[ "Accepts", "an", "identifier", "and", "resolves", "it", "to", "the", "referenced", "item", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L775-L805
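In practice ``recursive=True`` is the common mode for ``resolve``, since it keeps dereferencing until the result is an immutable ``/ipfs/`` path. A sketch, assuming a local daemon with working IPNS/DNS resolution:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)

    # Follow the chain of mutable names down to a content hash.
    result = c.resolve('/ipns/ipfs.io', recursive=True)
    print(result['Path'])  # an immutable /ipfs/... path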
234,694
ipfs/py-ipfs-api
ipfsapi/client.py
Client.key_gen
def key_gen(self, key_name, type, size=2048, **kwargs): """Adds a new public key that can be used for name_publish. .. code-block:: python >>> c.key_gen('example_key_name') {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} Parameters ---------- key_name : str Name of the new Key to be generated. Used to reference the Keys. type : str Type of key to generate. The current possible key types are: * ``"rsa"`` * ``"ed25519"`` size : int Bitsize of key to generate Returns ------- dict : Key name and Key Id """ opts = {"type": type, "size": size} kwargs.setdefault("opts", opts) args = (key_name,) return self._client.request('/key/gen', args, decoder='json', **kwargs)
python
def key_gen(self, key_name, type, size=2048, **kwargs): """Adds a new public key that can be used for name_publish. .. code-block:: python >>> c.key_gen('example_key_name') {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} Parameters ---------- key_name : str Name of the new Key to be generated. Used to reference the Keys. type : str Type of key to generate. The current possible key types are: * ``"rsa"`` * ``"ed25519"`` size : int Bitsize of key to generate Returns ------- dict : Key name and Key Id """ opts = {"type": type, "size": size} kwargs.setdefault("opts", opts) args = (key_name,) return self._client.request('/key/gen', args, decoder='json', **kwargs)
[ "def", "key_gen", "(", "self", ",", "key_name", ",", "type", ",", "size", "=", "2048", ",", "*", "*", "kwargs", ")", ":", "opts", "=", "{", "\"type\"", ":", "type", ",", "\"size\"", ":", "size", "}", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "opts", ")", "args", "=", "(", "key_name", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/key/gen'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Adds a new public key that can be used for name_publish. .. code-block:: python >>> c.key_gen('example_key_name') {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} Parameters ---------- key_name : str Name of the new Key to be generated. Used to reference the Keys. type : str Type of key to generate. The current possible key types are: * ``"rsa"`` * ``"ed25519"`` size : int Bitsize of key to generate Returns ------- dict : Key name and Key Id
[ "Adds", "a", "new", "public", "key", "that", "can", "be", "used", "for", "name_publish", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L825-L856
234,695
ipfs/py-ipfs-api
ipfsapi/client.py
Client.key_rm
def key_rm(self, key_name, *key_names, **kwargs): """Remove a keypair .. code-block:: python >>> c.key_rm("bla") {"Keys": [ {"Name": "bla", "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} ]} Parameters ---------- key_name : str Name of the key(s) to remove. Returns ------- dict : List of key names and IDs that have been removed """ args = (key_name,) + key_names return self._client.request('/key/rm', args, decoder='json', **kwargs)
python
def key_rm(self, key_name, *key_names, **kwargs): """Remove a keypair .. code-block:: python >>> c.key_rm("bla") {"Keys": [ {"Name": "bla", "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} ]} Parameters ---------- key_name : str Name of the key(s) to remove. Returns ------- dict : List of key names and IDs that have been removed """ args = (key_name,) + key_names return self._client.request('/key/rm', args, decoder='json', **kwargs)
[ "def", "key_rm", "(", "self", ",", "key_name", ",", "*", "key_names", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "key_name", ",", ")", "+", "key_names", "return", "self", ".", "_client", ".", "request", "(", "'/key/rm'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Remove a keypair .. code-block:: python >>> c.key_rm("bla") {"Keys": [ {"Name": "bla", "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} ]} Parameters ---------- key_name : str Name of the key(s) to remove. Returns ------- dict : List of key names and IDs that have been removed
[ "Remove", "a", "keypair" ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L858-L879
234,696
ipfs/py-ipfs-api
ipfsapi/client.py
Client.key_rename
def key_rename(self, key_name, new_key_name, **kwargs): """Rename a keypair .. code-block:: python >>> c.key_rename("bla", "personal") {"Was": "bla", "Now": "personal", "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", "Overwrite": False} Parameters ---------- key_name : str Current name of the key to rename new_key_name : str New name of the key Returns ------- dict : Information about the renamed key """ args = (key_name, new_key_name) return self._client.request('/key/rename', args, decoder='json', **kwargs)
python
def key_rename(self, key_name, new_key_name, **kwargs): """Rename a keypair .. code-block:: python >>> c.key_rename("bla", "personal") {"Was": "bla", "Now": "personal", "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", "Overwrite": False} Parameters ---------- key_name : str Current name of the key to rename new_key_name : str New name of the key Returns ------- dict : Information about the renamed key """ args = (key_name, new_key_name) return self._client.request('/key/rename', args, decoder='json', **kwargs)
[ "def", "key_rename", "(", "self", ",", "key_name", ",", "new_key_name", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "key_name", ",", "new_key_name", ")", "return", "self", ".", "_client", ".", "request", "(", "'/key/rename'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Rename a keypair .. code-block:: python >>> c.key_rename("bla", "personal") {"Was": "bla", "Now": "personal", "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", "Overwrite": False} Parameters ---------- key_name : str Current name of the key to rename new_key_name : str New name of the key Returns ------- dict : Information about the renamed key
[ "Rename", "a", "keypair" ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L881-L905
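The three key commands form a natural lifecycle: generate, rename, remove. A sketch, assuming a local daemon; the key names are placeholders:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)

    # Create a throwaway RSA key, give it a better name, then drop it.
    key = c.key_gen('scratch-key', 'rsa', size=2048)
    renamed = c.key_rename('scratch-key', 'docs-key')
    assert renamed['Now'] == 'docs-key'
    c.key_rm('docs-key')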
234,697
ipfs/py-ipfs-api
ipfsapi/client.py
Client.name_publish
def name_publish(self, ipfs_path, resolve=True, lifetime="24h", ttl=None, key=None, **kwargs): """Publishes an object to IPNS. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In publish, the default value of *name* is your own identity public key. .. code-block:: python >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} Parameters ---------- ipfs_path : str IPFS path of the object to be published resolve : bool Resolve given path before publishing lifetime : str Time duration that the record will be valid for Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. Valid units are: * ``"ns"`` * ``"us"`` (or ``"µs"``) * ``"ms"`` * ``"s"`` * ``"m"`` * ``"h"`` ttl : int Time duration this record should be cached for key : string Name of the key to be used, as listed by 'ipfs key list'. Returns ------- dict : IPNS hash and the IPFS path it points at """ opts = {"lifetime": lifetime, "resolve": resolve} if ttl: opts["ttl"] = ttl if key: opts["key"] = key kwargs.setdefault("opts", opts) args = (ipfs_path,) return self._client.request('/name/publish', args, decoder='json', **kwargs)
python
def name_publish(self, ipfs_path, resolve=True, lifetime="24h", ttl=None, key=None, **kwargs): """Publishes an object to IPNS. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In publish, the default value of *name* is your own identity public key. .. code-block:: python >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} Parameters ---------- ipfs_path : str IPFS path of the object to be published resolve : bool Resolve given path before publishing lifetime : str Time duration that the record will be valid for Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. Valid units are: * ``"ns"`` * ``"us"`` (or ``"µs"``) * ``"ms"`` * ``"s"`` * ``"m"`` * ``"h"`` ttl : int Time duration this record should be cached for key : string Name of the key to be used, as listed by 'ipfs key list'. Returns ------- dict : IPNS hash and the IPFS path it points at """ opts = {"lifetime": lifetime, "resolve": resolve} if ttl: opts["ttl"] = ttl if key: opts["key"] = key kwargs.setdefault("opts", opts) args = (ipfs_path,) return self._client.request('/name/publish', args, decoder='json', **kwargs)
[ "def", "name_publish", "(", "self", ",", "ipfs_path", ",", "resolve", "=", "True", ",", "lifetime", "=", "\"24h\"", ",", "ttl", "=", "None", ",", "key", "=", "None", ",", "*", "*", "kwargs", ")", ":", "opts", "=", "{", "\"lifetime\"", ":", "lifetime", ",", "\"resolve\"", ":", "resolve", "}", "if", "ttl", ":", "opts", "[", "\"ttl\"", "]", "=", "ttl", "if", "key", ":", "opts", "[", "\"key\"", "]", "=", "key", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "opts", ")", "args", "=", "(", "ipfs_path", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/name/publish'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Publishes an object to IPNS. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In publish, the default value of *name* is your own identity public key. .. code-block:: python >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} Parameters ---------- ipfs_path : str IPFS path of the object to be published resolve : bool Resolve given path before publishing lifetime : str Time duration that the record will be valid for Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. Valid units are: * ``"ns"`` * ``"us"`` (or ``"µs"``) * ``"ms"`` * ``"s"`` * ``"m"`` * ``"h"`` ttl : int Time duration this record should be cached for key : string Name of the key to be used, as listed by 'ipfs key list'. Returns ------- dict : IPNS hash and the IPFS path it points at
[ "Publishes", "an", "object", "to", "IPNS", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L907-L957
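Publish and resolve are inverses, which makes a round trip the clearest smoke test for IPNS. A sketch, assuming a local daemon whose repo already holds the example hash; publishing may take several seconds while the record is written:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)
    ipfs_path = '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'

    # Publish under the node's own identity key, valid for a week.
    record = c.name_publish(ipfs_path, lifetime='168h')

    # Resolving the returned IPNS name should yield the published path.
    resolved = c.name_resolve(record['Name'])
    assert resolved['Path'] == ipfs_path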
234,698
ipfs/py-ipfs-api
ipfsapi/client.py
Client.name_resolve
def name_resolve(self, name=None, recursive=False, nocache=False, **kwargs): """Gets the value currently published at an IPNS name. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In resolve, the default value of ``name`` is your own identity public key. .. code-block:: python >>> c.name_resolve() {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} Parameters ---------- name : str The IPNS name to resolve (defaults to the connected node) recursive : bool Resolve until the result is not an IPFS name (default: false) nocache : bool Do not use cached entries (default: false) Returns ------- dict : The IPFS path the IPNS hash points at """ kwargs.setdefault("opts", {"recursive": recursive, "nocache": nocache}) args = (name,) if name is not None else () return self._client.request('/name/resolve', args, decoder='json', **kwargs)
python
def name_resolve(self, name=None, recursive=False, nocache=False, **kwargs): """Gets the value currently published at an IPNS name. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In resolve, the default value of ``name`` is your own identity public key. .. code-block:: python >>> c.name_resolve() {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} Parameters ---------- name : str The IPNS name to resolve (defaults to the connected node) recursive : bool Resolve until the result is not an IPFS name (default: false) nocache : bool Do not use cached entries (default: false) Returns ------- dict : The IPFS path the IPNS hash points at """ kwargs.setdefault("opts", {"recursive": recursive, "nocache": nocache}) args = (name,) if name is not None else () return self._client.request('/name/resolve', args, decoder='json', **kwargs)
[ "def", "name_resolve", "(", "self", ",", "name", "=", "None", ",", "recursive", "=", "False", ",", "nocache", "=", "False", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "{", "\"recursive\"", ":", "recursive", ",", "\"nocache\"", ":", "nocache", "}", ")", "args", "=", "(", "name", ",", ")", "if", "name", "is", "not", "None", "else", "(", ")", "return", "self", ".", "_client", ".", "request", "(", "'/name/resolve'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Gets the value currently published at an IPNS name. IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In resolve, the default value of ``name`` is your own identity public key. .. code-block:: python >>> c.name_resolve() {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} Parameters ---------- name : str The IPNS name to resolve (defaults to the connected node) recursive : bool Resolve until the result is not an IPFS name (default: false) nocache : bool Do not use cached entries (default: false) Returns ------- dict : The IPFS path the IPNS hash points at
[ "Gets", "the", "value", "currently", "published", "at", "an", "IPNS", "name", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L959-L989
234,699
ipfs/py-ipfs-api
ipfsapi/client.py
Client.dns
def dns(self, domain_name, recursive=False, **kwargs): """Resolves DNS links to the referenced object. Multihashes are hard to remember, but domain names are usually easy to remember. To create memorable aliases for multihashes, DNS TXT records can point to other DNS links, IPFS objects, IPNS keys, etc. This command resolves those links to the referenced object. For example, with this DNS TXT record:: >>> import dns.resolver >>> a = dns.resolver.query("ipfs.io", "TXT") >>> a.response.answer[0].items[0].to_text() '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' The resolver will give:: >>> c.dns("ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- domain_name : str The domain name to resolve recursive : bool Resolve until the name is not a DNS link Returns ------- dict : Resource that a DNS entry points to """ kwargs.setdefault("opts", {"recursive": recursive}) args = (domain_name,) return self._client.request('/dns', args, decoder='json', **kwargs)
python
def dns(self, domain_name, recursive=False, **kwargs): """Resolves DNS links to the referenced object. Multihashes are hard to remember, but domain names are usually easy to remember. To create memorable aliases for multihashes, DNS TXT records can point to other DNS links, IPFS objects, IPNS keys, etc. This command resolves those links to the referenced object. For example, with this DNS TXT record:: >>> import dns.resolver >>> a = dns.resolver.query("ipfs.io", "TXT") >>> a.response.answer[0].items[0].to_text() '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' The resolver will give:: >>> c.dns("ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- domain_name : str The domain name to resolve recursive : bool Resolve until the name is not a DNS link Returns ------- dict : Resource that a DNS entry points to """ kwargs.setdefault("opts", {"recursive": recursive}) args = (domain_name,) return self._client.request('/dns', args, decoder='json', **kwargs)
[ "def", "dns", "(", "self", ",", "domain_name", ",", "recursive", "=", "False", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "{", "\"recursive\"", ":", "recursive", "}", ")", "args", "=", "(", "domain_name", ",", ")", "return", "self", ".", "_client", ".", "request", "(", "'/dns'", ",", "args", ",", "decoder", "=", "'json'", ",", "*", "*", "kwargs", ")" ]
Resolves DNS links to the referenced object. Multihashes are hard to remember, but domain names are usually easy to remember. To create memorable aliases for multihashes, DNS TXT records can point to other DNS links, IPFS objects, IPNS keys, etc. This command resolves those links to the referenced object. For example, with this DNS TXT record:: >>> import dns.resolver >>> a = dns.resolver.query("ipfs.io", "TXT") >>> a.response.answer[0].items[0].to_text() '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' The resolver will give:: >>> c.dns("ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- domain_name : str The domain name to resolve recursive : bool Resolve until the name is not a DNS link Returns ------- dict : Resource that a DNS entry points to
[ "Resolves", "DNS", "links", "to", "the", "referenced", "object", "." ]
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L991-L1025
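A closing sketch for ``dns``, assuming a local daemon with outbound DNS access; ``recursive`` controls whether chained dnslink records are followed to their final target:

.. code-block:: python

    import ipfsapi

    c = ipfsapi.connect('127.0.0.1', 5001)

    # One hop: the TXT record's dnslink value, verbatim.
    print(c.dns('ipfs.io')['Path'])

    # Keep following DNS links until the target is no longer one.
    print(c.dns('ipfs.io', recursive=True)['Path'])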