Dataset columns:
- repository_name: string (length 7 to 55)
- func_path_in_repository: string (length 4 to 223)
- func_name: string (length 1 to 134)
- whole_func_string: string (length 75 to 104k)
- language: string (1 distinct value)
- func_code_string: string (length 75 to 104k)
- func_code_tokens: list (length 19 to 28.4k)
- func_documentation_string: string (length 1 to 46.9k)
- func_documentation_tokens: list (length 1 to 1.97k)
- split_name: string (1 distinct value)
- func_code_url: string (length 87 to 315)
Julius2342/pyvlx
old_api/pyvlx/devices.py
Devices.data_import
def data_import(self, json_response):
    """Import data from json response."""
    if 'data' not in json_response:
        raise PyVLXException('no element data found: {0}'.format(
            json.dumps(json_response)))
    data = json_response['data']
    for item in data:
        if 'category' not in item:
            raise PyVLXException('no element category: {0}'.format(
                json.dumps(item)))
        category = item['category']
        if category == 'Window opener':
            self.load_window_opener(item)
        elif category in ['Roller shutter', 'Dual Shutter']:
            self.load_roller_shutter(item)
        elif category in ['Blind']:
            self.load_blind(item)
        else:
            self.pyvlx.logger.warning(
                'WARNING: Could not parse product: %s', category)
python
Import data from json response.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/old_api/pyvlx/devices.py#L47-L67
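For context, a minimal sketch of the json_response shape that data_import() above accepts; only the 'data' key and each item's 'category' are required by the checks in the code, and the 'name' values here are made up for illustration:

# Hypothetical input for Devices.data_import(); unknown categories only trigger a warning.
json_response = {
    "data": [
        {"category": "Window opener", "name": "Bathroom Window"},   # handled by load_window_opener()
        {"category": "Roller shutter", "name": "Kitchen Shutter"},  # handled by load_roller_shutter()
        {"category": "Dual Shutter", "name": "Terrace Shutter"},    # also routed to load_roller_shutter()
        {"category": "Blind", "name": "Office Blind"},              # handled by load_blind()
    ]
}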
Julius2342/pyvlx
old_api/pyvlx/devices.py
Devices.load_window_opener
def load_window_opener(self, item):
    """Load window opener from JSON."""
    window = Window.from_config(self.pyvlx, item)
    self.add(window)
python
Load window opener from JSON.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/old_api/pyvlx/devices.py#L69-L72
Julius2342/pyvlx
old_api/pyvlx/devices.py
Devices.load_roller_shutter
def load_roller_shutter(self, item):
    """Load roller shutter from JSON."""
    rollershutter = RollerShutter.from_config(self.pyvlx, item)
    self.add(rollershutter)
python
Load roller shutter from JSON.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/old_api/pyvlx/devices.py#L74-L77
Julius2342/pyvlx
old_api/pyvlx/devices.py
Devices.load_blind
def load_blind(self, item):
    """Load blind from JSON."""
    blind = Blind.from_config(self.pyvlx, item)
    self.add(blind)
python
Load blind from JSON.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/old_api/pyvlx/devices.py#L79-L82
cs50/style50
style50/_api.py
get_terminal_size
def get_terminal_size(fallback=(80, 24)):
    """
    Return tuple containing columns and rows of controlling terminal, trying harder
    than shutil.get_terminal_size to find a tty before returning fallback.

    Theoretically, stdout, stderr, and stdin could all be different ttys that could
    cause us to get the wrong measurements (instead of using the fallback) but the
    much more common case is that IO is piped.
    """
    for stream in [sys.__stdout__, sys.__stderr__, sys.__stdin__]:
        try:
            # Make WINSIZE call to terminal
            data = fcntl.ioctl(stream.fileno(), TIOCGWINSZ, b"\x00\x00\00\x00")
        except OSError:
            pass
        else:
            # Unpack two shorts from ioctl call
            lines, columns = struct.unpack("hh", data)
            break
    else:
        columns, lines = fallback

    return columns, lines
python
Return tuple containing columns and rows of controlling terminal, trying harder than shutil.get_terminal_size to find a tty before returning fallback. Theoretically, stdout, stderr, and stdin could all be different ttys that could cause us to get the wrong measurements (instead of using the fallback) but the much more common case is that IO is piped.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L23-L45
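A standalone, Unix-only sketch of the same ioctl/struct idea, using the stdlib termios constant in place of the module-level TIOCGWINSZ used above; the kernel winsize struct starts with rows, then columns, which is why the two shorts are swapped before returning:

import fcntl
import struct
import sys
import termios  # termios.TIOCGWINSZ stands in for style50's own constant

def terminal_size(fallback=(80, 24)):
    try:
        # winsize is (rows, cols, xpixel, ypixel); only the first two shorts are unpacked
        rows, cols = struct.unpack("hh", fcntl.ioctl(sys.__stdout__.fileno(),
                                                     termios.TIOCGWINSZ, b"\x00" * 4))
    except OSError:
        return fallback
    return cols, rows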
cs50/style50
style50/_api.py
Style50.run_diff
def run_diff(self):
    """
    Run checks on self.files, printing diff of styled/unstyled output to stdout.
    """
    files = tuple(self.files)

    # Use same header as more.
    header, footer = (termcolor.colored("{0}\n{{}}\n{0}\n".format(
        ":" * 14), "cyan"), "\n") if len(files) > 1 else ("", "")

    for file in files:
        print(header.format(file), end="")
        try:
            results = self._check(file)
        except Error as e:
            termcolor.cprint(e.msg, "yellow", file=sys.stderr)
            continue

        # Display results
        if results.diffs:
            print()
            print(*self.diff(results.original, results.styled), sep="\n")
            print()
            conjunction = "And"
        else:
            termcolor.cprint("Looks good!", "green")
            conjunction = "But"

        if results.diffs:
            for type, c in sorted(self._warn_chars):
                color, verb = ("on_green", "insert") if type == "+" else ("on_red", "delete")
                termcolor.cprint(c, None, color, end="")
                termcolor.cprint(" means that you should {} a {}.".format(
                    verb, "newline" if c == "\\n" else "tab"), "yellow")

        if results.comment_ratio < results.COMMENT_MIN:
            termcolor.cprint("{} consider adding more comments!".format(conjunction), "yellow")

        if (results.comment_ratio < results.COMMENT_MIN or self._warn_chars) and results.diffs:
            print()
python
Run checks on self.files, printing diff of styled/unstyled output to stdout.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L96-L134
cs50/style50
style50/_api.py
Style50.run_json
def run_json(self):
    """
    Run checks on self.files, printing json object containing information
    relevant to the CS50 IDE plugin at the end.
    """
    checks = {}
    for file in self.files:
        try:
            results = self._check(file)
        except Error as e:
            checks[file] = {
                "error": e.msg
            }
        else:
            checks[file] = {
                "score": results.score,
                "comments": results.comment_ratio >= results.COMMENT_MIN,
                "diff": "<pre>{}</pre>".format("\n".join(self.html_diff(results.original, results.styled))),
            }

    json.dump(checks, sys.stdout, indent=4)
    print()
python
Run checks on self.files, printing json object containing information relevant to the CS50 IDE plugin at the end.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L136-L157
cs50/style50
style50/_api.py
Style50.run_score
def run_score(self):
    """
    Run checks on self.files, printing raw percentage to stdout.
    """
    diffs = 0
    lines = 0
    for file in self.files:
        try:
            results = self._check(file)
        except Error as e:
            termcolor.cprint(e.msg, "yellow", file=sys.stderr)
            continue
        diffs += results.diffs
        lines += results.lines

    try:
        print(max(1 - diffs / lines, 0.0))
    except ZeroDivisionError:
        print(0.0)
python
Run checks on self.files, printing raw percentage to stdout.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L159-L179
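A quick worked example of the raw score printed above: 10 differing lines out of 40 checked lines yields 0.75, and an empty file set falls into the ZeroDivisionError branch and prints 0.0.

# Illustrative arithmetic only; diffs and lines come from the accumulated results above.
diffs, lines = 10, 40
print(max(1 - diffs / lines, 0.0))  # 0.75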
cs50/style50
style50/_api.py
Style50._check
def _check(self, file):
    """
    Run appropriate check based on `file`'s extension and return it,
    otherwise raise an Error.
    """
    if not os.path.exists(file):
        raise Error("file \"{}\" not found".format(file))

    _, extension = os.path.splitext(file)
    try:
        check = self.extension_map[extension[1:]]
    except KeyError:
        magic_type = magic.from_file(file)
        for name, cls in self.magic_map.items():
            if name in magic_type:
                check = cls
                break
        else:
            raise Error("unknown file type \"{}\", skipping...".format(file))

    try:
        with open(file) as f:
            code = "\n".join(line.rstrip() for line in f)
    except UnicodeDecodeError:
        raise Error("file does not seem to contain text, skipping...")

    # Ensure we don't warn about adding trailing newline
    try:
        if code[-1] != '\n':
            code += '\n'
    except IndexError:
        pass

    return check(code)
python
Run appropriate check based on `file`'s extension and return it, otherwise raise an Error.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L181-L215
cs50/style50
style50/_api.py
Style50.split_diff
def split_diff(old, new):
    """
    Returns a generator yielding the side-by-side diff of `old` and `new`.
    """
    return map(lambda l: l.rstrip(),
               icdiff.ConsoleDiff(cols=COLUMNS).make_table(old.splitlines(), new.splitlines()))
python
Returns a generator yielding the side-by-side diff of `old` and `new`.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L218-L223
cs50/style50
style50/_api.py
Style50.unified
def unified(old, new):
    """
    Returns a generator yielding a unified diff between `old` and `new`.
    """
    for diff in difflib.ndiff(old.splitlines(), new.splitlines()):
        if diff[0] == " ":
            yield diff
        elif diff[0] == "?":
            continue
        else:
            yield termcolor.colored(diff, "red" if diff[0] == "-" else "green", attrs=["bold"])
python
Returns a generator yielding a unified diff between `old` and `new`.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L226-L236
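A standalone sketch of the difflib.ndiff prefixes that unified() filters: unchanged lines start with two spaces, removals with "- ", additions with "+ ", and "? " hint lines are skipped before coloring.

import difflib

old = "int x=1;\nreturn x;\n"
new = "int x = 1;\nreturn x;\n"
for line in difflib.ndiff(old.splitlines(), new.splitlines()):
    print(repr(line))  # the "- " and "+ " lines are the ones colored red/green above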
cs50/style50
style50/_api.py
Style50.html_diff
def html_diff(self, old, new):
    """
    Return HTML formatted character-based diff between old and new (used for CS50 IDE).
    """
    def html_transition(old_type, new_type):
        tags = []
        for tag in [("/", old_type), ("", new_type)]:
            if tag[1] not in ["+", "-"]:
                continue
            tags.append("<{}{}>".format(tag[0], "ins" if tag[1] == "+" else "del"))
        return "".join(tags)

    return self._char_diff(old, new, html_transition, fmt=cgi.escape)
python
Return HTML formatted character-based diff between old and new (used for CS50 IDE).
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L238-L250
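To illustrate the transition helper, a few hand-evaluated calls with hypothetical block types, following the logic above:

# html_transition(" ", "+") -> "<ins>"         opens an insertion block
# html_transition("+", " ") -> "</ins>"        closes it again
# html_transition("-", "+") -> "</del><ins>"   switches straight from deletion to insertion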
cs50/style50
style50/_api.py
Style50.char_diff
def char_diff(self, old, new):
    """
    Return color-coded character-based diff between `old` and `new`.
    """
    def color_transition(old_type, new_type):
        new_color = termcolor.colored("", None,
                                      "on_red" if new_type == "-" else "on_green" if new_type == "+" else None)
        return "{}{}".format(termcolor.RESET, new_color[:-len(termcolor.RESET)])

    return self._char_diff(old, new, color_transition)
python
Return color-coded character-based diff between `old` and `new`.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L252-L261
cs50/style50
style50/_api.py
Style50._char_diff
def _char_diff(self, old, new, transition, fmt=lambda c: c):
    """
    Returns a char-based diff between `old` and `new` where each character
    is formatted by `fmt` and transitions between blocks are determined by `transition`.
    """
    differ = difflib.ndiff(old, new)
    # Type of difference.
    dtype = None
    # Buffer for current line.
    line = []
    while True:
        # Get next diff or None if we're at the end.
        d = next(differ, (None,))
        if d[0] != dtype:
            line += transition(dtype, d[0])
            dtype = d[0]

        if dtype is None:
            break

        if d[2] == "\n":
            if dtype != " ":
                self._warn_chars.add((dtype, "\\n"))
                # Show added/removed newlines.
                line += [fmt(r"\n"), transition(dtype, " ")]

            # Don't yield a line if we are removing a newline
            if dtype != "-":
                yield "".join(line)
                line.clear()
                line.append(transition(" ", dtype))

        elif dtype != " " and d[2] == "\t":
            # Show added/removed tabs.
            line.append(fmt("\\t"))
            self._warn_chars.add((dtype, "\\t"))
        else:
            line.append(fmt(d[2]))

    # Flush buffer before quitting.
    last = "".join(line)
    # Only print last line if it contains non-ANSI characters.
    if re.sub(r"\x1b[^m]*m", "", last):
        yield last
python
Returns a char-based diff between `old` and `new` where each character is formatted by `fmt` and transitions between blocks are determined by `transition`.
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L263-L309
cs50/style50
style50/_api.py
StyleCheck.count_lines
def count_lines(self, code):
    """
    Count lines of code (by default ignores empty lines, but child could override to do more).
    """
    return sum(bool(line.strip()) for line in code.splitlines())
python
Count lines of code (by default ignores empty lines, but child could override to do more).
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L366-L370
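A small illustration of the default counting, which skips blank lines; the snippet being counted is arbitrary:

code = "int x;\n\n// comment\nint y;\n"
print(sum(bool(line.strip()) for line in code.splitlines()))  # 3 non-blank lines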
cs50/style50
style50/_api.py
StyleCheck.run
def run(command, input=None, exit=0, shell=False):
    """
    Run `command` passing it stdin from `input`, throwing a DependencyError if
    command is not found. Throws Error if exit code of command is not `exit`
    (unless `exit` is None).
    """
    if isinstance(input, str):
        input = input.encode()

    # Only pipe stdin if we have input to pipe.
    stdin = {} if input is None else {"stdin": subprocess.PIPE}
    try:
        child = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **stdin)
    except FileNotFoundError as e:
        # Extract name of command.
        name = command.split(' ', 1)[0] if isinstance(command, str) else command[0]
        raise DependencyError(name)

    stdout, _ = child.communicate(input=input)
    if exit is not None and child.returncode != exit:
        raise Error("failed to stylecheck code")

    return stdout.decode()
python
Run `command` passing it stdin from `input`, throwing a DependencyError if command is not found. Throws Error if exit code of command is not `exit` (unless `exit` is None).
train
https://github.com/cs50/style50/blob/2dfe5957f7b727ee5163499e7b8191275aee914c/style50/_api.py#L373-L394
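A standalone sketch of the underlying Popen/communicate pattern used above, piping input to a hypothetical `cat` command on a Unix system; it is not the style50 helper itself:

import subprocess

# Mirrors run("cat", input="hello"): pipe stdin, capture stdout, check the exit code.
child = subprocess.Popen(["cat"], stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, _ = child.communicate(input=b"hello")
assert child.returncode == 0
print(stdout.decode())  # hello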
Julius2342/pyvlx
pyvlx/frame_creation.py
frame_from_raw
def frame_from_raw(raw):
    """Create and return frame from raw bytes."""
    command, payload = extract_from_frame(raw)
    frame = create_frame(command)
    if frame is None:
        PYVLXLOG.warning("Command %s not implemented, raw: %s", command,
                         ":".join("{:02x}".format(c) for c in raw))
        return None
    frame.validate_payload_len(payload)
    frame.from_payload(payload)
    return frame
python
Create and return frame from raw bytes.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frame_creation.py#L32-L41
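The hex formatting used in the warning above, shown on its own with made-up frame bytes:

raw = bytes([0x00, 0x10, 0x20, 0x0d])  # hypothetical raw frame
print(":".join("{:02x}".format(c) for c in raw))  # 00:10:20:0d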
Julius2342/pyvlx
pyvlx/frame_creation.py
create_frame
def create_frame(command):
    """Create and return empty Frame from Command."""
    # pylint: disable=too-many-branches,too-many-return-statements
    if command == Command.GW_ERROR_NTF:
        return FrameErrorNotification()
    if command == Command.GW_COMMAND_SEND_REQ:
        return FrameCommandSendRequest()
    if command == Command.GW_COMMAND_SEND_CFM:
        return FrameCommandSendConfirmation()
    if command == Command.GW_COMMAND_RUN_STATUS_NTF:
        return FrameCommandRunStatusNotification()
    if command == Command.GW_COMMAND_REMAINING_TIME_NTF:
        return FrameCommandRemainingTimeNotification()
    if command == Command.GW_SESSION_FINISHED_NTF:
        return FrameSessionFinishedNotification()
    if command == Command.GW_PASSWORD_ENTER_REQ:
        return FramePasswordEnterRequest()
    if command == Command.GW_PASSWORD_ENTER_CFM:
        return FramePasswordEnterConfirmation()
    if command == Command.GW_CS_DISCOVER_NODES_REQ:
        return FrameDiscoverNodesRequest()
    if command == Command.GW_CS_DISCOVER_NODES_CFM:
        return FrameDiscoverNodesConfirmation()
    if command == Command.GW_CS_DISCOVER_NODES_NTF:
        return FrameDiscoverNodesNotification()
    if command == Command.GW_GET_SCENE_LIST_REQ:
        return FrameGetSceneListRequest()
    if command == Command.GW_GET_SCENE_LIST_CFM:
        return FrameGetSceneListConfirmation()
    if command == Command.GW_GET_SCENE_LIST_NTF:
        return FrameGetSceneListNotification()
    if command == Command.GW_GET_NODE_INFORMATION_REQ:
        return FrameGetNodeInformationRequest()
    if command == Command.GW_GET_NODE_INFORMATION_CFM:
        return FrameGetNodeInformationConfirmation()
    if command == Command.GW_GET_NODE_INFORMATION_NTF:
        return FrameGetNodeInformationNotification()
    if command == Command.GW_GET_ALL_NODES_INFORMATION_REQ:
        return FrameGetAllNodesInformationRequest()
    if command == Command.GW_GET_ALL_NODES_INFORMATION_CFM:
        return FrameGetAllNodesInformationConfirmation()
    if command == Command.GW_GET_ALL_NODES_INFORMATION_NTF:
        return FrameGetAllNodesInformationNotification()
    if command == Command.GW_GET_ALL_NODES_INFORMATION_FINISHED_NTF:
        return FrameGetAllNodesInformationFinishedNotification()
    if command == Command.GW_ACTIVATE_SCENE_REQ:
        return FrameActivateSceneRequest()
    if command == Command.GW_ACTIVATE_SCENE_CFM:
        return FrameActivateSceneConfirmation()
    if command == Command.GW_GET_VERSION_REQ:
        return FrameGetVersionRequest()
    if command == Command.GW_GET_VERSION_CFM:
        return FrameGetVersionConfirmation()
    if command == Command.GW_GET_PROTOCOL_VERSION_REQ:
        return FrameGetProtocolVersionRequest()
    if command == Command.GW_GET_PROTOCOL_VERSION_CFM:
        return FrameGetProtocolVersionConfirmation()
    if command == Command.GW_SET_NODE_NAME_REQ:
        return FrameSetNodeNameRequest()
    if command == Command.GW_SET_NODE_NAME_CFM:
        return FrameSetNodeNameConfirmation()
    if command == Command.GW_NODE_INFORMATION_CHANGED_NTF:
        return FrameNodeInformationChangedNotification()
    if command == Command.GW_GET_STATE_REQ:
        return FrameGetStateRequest()
    if command == Command.GW_GET_STATE_CFM:
        return FrameGetStateConfirmation()
    if command == Command.GW_SET_UTC_REQ:
        return FrameSetUTCRequest()
    if command == Command.GW_SET_UTC_CFM:
        return FrameSetUTCConfirmation()
    if command == Command.GW_ACTIVATION_LOG_UPDATED_NTF:
        return FrameActivationLogUpdatedNotification()
    if command == Command.GW_HOUSE_STATUS_MONITOR_ENABLE_REQ:
        return FrameHouseStatusMonitorEnableRequest()
    if command == Command.GW_HOUSE_STATUS_MONITOR_ENABLE_CFM:
        return FrameHouseStatusMonitorEnableConfirmation()
    if command == Command.GW_HOUSE_STATUS_MONITOR_DISABLE_REQ:
        return FrameHouseStatusMonitorDisableRequest()
    if command == Command.GW_HOUSE_STATUS_MONITOR_DISABLE_CFM:
        return FrameHouseStatusMonitorDisableConfirmation()
    if command == Command.GW_NODE_STATE_POSITION_CHANGED_NTF:
        return FrameNodeStatePositionChangedNotification()
    return None
python
Create and return empty Frame from Command.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frame_creation.py#L44-L142
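Because every branch above is a plain equality test, the same dispatch could also be written as a lookup table; a hedged sketch using only a few of the commands shown above (an actual mapping would list all of them), not the pyvlx implementation:

# Illustrative alternative only: map Command values to frame classes.
FRAME_CLASSES = {
    Command.GW_ERROR_NTF: FrameErrorNotification,
    Command.GW_COMMAND_SEND_REQ: FrameCommandSendRequest,
    Command.GW_COMMAND_SEND_CFM: FrameCommandSendConfirmation,
    Command.GW_NODE_STATE_POSITION_CHANGED_NTF: FrameNodeStatePositionChangedNotification,
}

def create_frame_from_table(command):
    """Return an empty frame for command, or None if it is not mapped."""
    frame_class = FRAME_CLASSES.get(command)
    return frame_class() if frame_class is not None else None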
Julius2342/pyvlx
pyvlx/login.py
Login.handle_frame
async def handle_frame(self, frame):
    """Handle incoming API frame, return True if this was the expected frame."""
    if not isinstance(frame, FramePasswordEnterConfirmation):
        return False
    if frame.status == PasswordEnterConfirmationStatus.FAILED:
        PYVLXLOG.warning('Failed to authenticate with password "%s****"', self.password[:2])
        self.success = False
    if frame.status == PasswordEnterConfirmationStatus.SUCCESSFUL:
        self.success = True
    return True
python
Handle incoming API frame, return True if this was the expected frame.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/login.py#L18-L27
Julius2342/pyvlx
pyvlx/frames/frame_get_protocol_version.py
FrameGetProtocolVersionConfirmation.get_payload
def get_payload(self):
    """Return Payload."""
    return bytes([
        self.major_version >> 8 & 255, self.major_version & 255,
        self.minor_version >> 8 & 255, self.minor_version & 255])
python
Return Payload.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_get_protocol_version.py#L33-L37
Julius2342/pyvlx
pyvlx/frames/frame_get_protocol_version.py
FrameGetProtocolVersionConfirmation.from_payload
def from_payload(self, payload):
    """Init frame from binary data."""
    self.major_version = payload[0] * 256 + payload[1]
    self.minor_version = payload[2] * 256 + payload[3]
python
Init frame from binary data.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_get_protocol_version.py#L39-L42
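A small standalone check of the byte packing shared by get_payload() and from_payload() above: each version number is split into a high byte and a low byte, and reassembled as payload[0] * 256 + payload[1]; the version numbers are arbitrary:

major_version, minor_version = 3, 14
payload = bytes([major_version >> 8 & 255, major_version & 255,
                 minor_version >> 8 & 255, minor_version & 255])
assert payload == b"\x00\x03\x00\x0e"
assert (payload[0] * 256 + payload[1], payload[2] * 256 + payload[3]) == (3, 14)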
Julius2342/pyvlx
pyvlx/connection.py
TCPTransport.data_received
def data_received(self, data):
    """Handle data received."""
    self.tokenizer.feed(data)
    while self.tokenizer.has_tokens():
        raw = self.tokenizer.get_next_token()
        frame = frame_from_raw(raw)
        if frame is not None:
            self.frame_received_cb(frame)
python
Handle data received.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/connection.py#L47-L54
Julius2342/pyvlx
pyvlx/connection.py
Connection.connect
async def connect(self):
    """Connect to gateway via SSL."""
    tcp_client = TCPTransport(self.frame_received_cb, self.connection_closed_cb)
    self.transport, _ = await self.loop.create_connection(
        lambda: tcp_client,
        host=self.config.host,
        port=self.config.port,
        ssl=self.create_ssl_context())
    self.connected = True
python
Connect to gateway via SSL.
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/connection.py#L82-L90
Julius2342/pyvlx
pyvlx/connection.py
Connection.write
def write(self, frame):
    """Write frame to Bus."""
    if not isinstance(frame, FrameBase):
        raise PyVLXException("Frame not of type FrameBase", frame_type=type(frame))
    PYVLXLOG.debug("SEND: %s", frame)
    self.transport.write(slip_pack(bytes(frame)))
python
Write frame to Bus.
[ "Write", "frame", "to", "Bus", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/connection.py#L100-L105
Julius2342/pyvlx
pyvlx/connection.py
Connection.create_ssl_context
def create_ssl_context(): """Create and return SSL Context.""" ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE return ssl_context
python
def create_ssl_context(): """Create and return SSL Context.""" ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE return ssl_context
[ "def", "create_ssl_context", "(", ")", ":", "ssl_context", "=", "ssl", ".", "create_default_context", "(", "ssl", ".", "Purpose", ".", "SERVER_AUTH", ")", "ssl_context", ".", "check_hostname", "=", "False", "ssl_context", ".", "verify_mode", "=", "ssl", ".", "CERT_NONE", "return", "ssl_context" ]
Create and return SSL Context.
[ "Create", "and", "return", "SSL", "Context", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/connection.py#L108-L113
Julius2342/pyvlx
pyvlx/connection.py
Connection.frame_received_cb
def frame_received_cb(self, frame): """Received message.""" PYVLXLOG.debug("REC: %s", frame) for frame_received_cb in self.frame_received_cbs: # pylint: disable=not-callable self.loop.create_task(frame_received_cb(frame))
python
def frame_received_cb(self, frame): """Received message.""" PYVLXLOG.debug("REC: %s", frame) for frame_received_cb in self.frame_received_cbs: # pylint: disable=not-callable self.loop.create_task(frame_received_cb(frame))
[ "def", "frame_received_cb", "(", "self", ",", "frame", ")", ":", "PYVLXLOG", ".", "debug", "(", "\"REC: %s\"", ",", "frame", ")", "for", "frame_received_cb", "in", "self", ".", "frame_received_cbs", ":", "# pylint: disable=not-callable", "self", ".", "loop", ".", "create_task", "(", "frame_received_cb", "(", "frame", ")", ")" ]
Received message.
[ "Received", "message", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/connection.py#L115-L120
Julius2342/pyvlx
old_api/pyvlx/config.py
Config.read_config
def read_config(self, path): """Read configuration file.""" self.pyvlx.logger.info('Reading config file: ', path) try: with open(path, 'r') as filehandle: doc = yaml.load(filehandle) if 'config' not in doc: raise PyVLXException('no element config found in: {0}'.format(path)) if 'host' not in doc['config']: raise PyVLXException('no element host found in: {0}'.format(path)) if 'password' not in doc['config']: raise PyVLXException('no element password found in: {0}'.format(path)) self.host = doc['config']['host'] self.password = doc['config']['password'] except FileNotFoundError as ex: raise PyVLXException('file does not exist: {0}'.format(ex))
python
def read_config(self, path): """Read configuration file.""" self.pyvlx.logger.info('Reading config file: ', path) try: with open(path, 'r') as filehandle: doc = yaml.load(filehandle) if 'config' not in doc: raise PyVLXException('no element config found in: {0}'.format(path)) if 'host' not in doc['config']: raise PyVLXException('no element host found in: {0}'.format(path)) if 'password' not in doc['config']: raise PyVLXException('no element password found in: {0}'.format(path)) self.host = doc['config']['host'] self.password = doc['config']['password'] except FileNotFoundError as ex: raise PyVLXException('file does not exist: {0}'.format(ex))
[ "def", "read_config", "(", "self", ",", "path", ")", ":", "self", ".", "pyvlx", ".", "logger", ".", "info", "(", "'Reading config file: '", ",", "path", ")", "try", ":", "with", "open", "(", "path", ",", "'r'", ")", "as", "filehandle", ":", "doc", "=", "yaml", ".", "load", "(", "filehandle", ")", "if", "'config'", "not", "in", "doc", ":", "raise", "PyVLXException", "(", "'no element config found in: {0}'", ".", "format", "(", "path", ")", ")", "if", "'host'", "not", "in", "doc", "[", "'config'", "]", ":", "raise", "PyVLXException", "(", "'no element host found in: {0}'", ".", "format", "(", "path", ")", ")", "if", "'password'", "not", "in", "doc", "[", "'config'", "]", ":", "raise", "PyVLXException", "(", "'no element password found in: {0}'", ".", "format", "(", "path", ")", ")", "self", ".", "host", "=", "doc", "[", "'config'", "]", "[", "'host'", "]", "self", ".", "password", "=", "doc", "[", "'config'", "]", "[", "'password'", "]", "except", "FileNotFoundError", "as", "ex", ":", "raise", "PyVLXException", "(", "'file does not exist: {0}'", ".", "format", "(", "ex", ")", ")" ]
Read configuration file.
[ "Read", "configuration", "file", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/old_api/pyvlx/config.py#L19-L34
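As a hedged illustration of the configuration shape this read_config() implementation accepts, the sketch below writes a minimal YAML file containing only the keys the method checks for; the host, password, and file name are invented for the example.
# Minimal sketch only: host/password values and the file name are hypothetical.
sample = (
    "config:\n"
    "  host: 192.168.1.23\n"
    "  password: velux123\n"
)
with open("pyvlx.yaml", "w") as filehandle:
    filehandle.write(sample)
# A Config object's read_config("pyvlx.yaml") would then populate
# self.host and self.password from these two keys.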
Julius2342/pyvlx
pyvlx/node_updater.py
NodeUpdater.process_frame
async def process_frame(self, frame): """Update nodes via frame, usually received by house monitor.""" if isinstance(frame, FrameNodeStatePositionChangedNotification): if frame.node_id not in self.pyvlx.nodes: return node = self.pyvlx.nodes[frame.node_id] if isinstance(node, OpeningDevice): node.position = Position(frame.current_position) await node.after_update() elif isinstance(frame, FrameGetAllNodesInformationNotification): if frame.node_id not in self.pyvlx.nodes: return node = self.pyvlx.nodes[frame.node_id] if isinstance(node, OpeningDevice): node.position = Position(frame.current_position) await node.after_update()
python
async def process_frame(self, frame): """Update nodes via frame, usually received by house monitor.""" if isinstance(frame, FrameNodeStatePositionChangedNotification): if frame.node_id not in self.pyvlx.nodes: return node = self.pyvlx.nodes[frame.node_id] if isinstance(node, OpeningDevice): node.position = Position(frame.current_position) await node.after_update() elif isinstance(frame, FrameGetAllNodesInformationNotification): if frame.node_id not in self.pyvlx.nodes: return node = self.pyvlx.nodes[frame.node_id] if isinstance(node, OpeningDevice): node.position = Position(frame.current_position) await node.after_update()
[ "async", "def", "process_frame", "(", "self", ",", "frame", ")", ":", "if", "isinstance", "(", "frame", ",", "FrameNodeStatePositionChangedNotification", ")", ":", "if", "frame", ".", "node_id", "not", "in", "self", ".", "pyvlx", ".", "nodes", ":", "return", "node", "=", "self", ".", "pyvlx", ".", "nodes", "[", "frame", ".", "node_id", "]", "if", "isinstance", "(", "node", ",", "OpeningDevice", ")", ":", "node", ".", "position", "=", "Position", "(", "frame", ".", "current_position", ")", "await", "node", ".", "after_update", "(", ")", "elif", "isinstance", "(", "frame", ",", "FrameGetAllNodesInformationNotification", ")", ":", "if", "frame", ".", "node_id", "not", "in", "self", ".", "pyvlx", ".", "nodes", ":", "return", "node", "=", "self", ".", "pyvlx", ".", "nodes", "[", "frame", ".", "node_id", "]", "if", "isinstance", "(", "node", ",", "OpeningDevice", ")", ":", "node", ".", "position", "=", "Position", "(", "frame", ".", "current_position", ")", "await", "node", ".", "after_update", "(", ")" ]
Update nodes via frame, usually received by house monitor.
[ "Update", "nodes", "via", "frame", "usually", "received", "by", "house", "monitor", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/node_updater.py#L14-L29
Julius2342/pyvlx
pyvlx/scenes.py
Scenes.add
def add(self, scene): """Add scene, replace existing scene if scene with scene_id is present.""" if not isinstance(scene, Scene): raise TypeError() for i, j in enumerate(self.__scenes): if j.scene_id == scene.scene_id: self.__scenes[i] = scene return self.__scenes.append(scene)
python
def add(self, scene): """Add scene, replace existing scene if scene with scene_id is present.""" if not isinstance(scene, Scene): raise TypeError() for i, j in enumerate(self.__scenes): if j.scene_id == scene.scene_id: self.__scenes[i] = scene return self.__scenes.append(scene)
[ "def", "add", "(", "self", ",", "scene", ")", ":", "if", "not", "isinstance", "(", "scene", ",", "Scene", ")", ":", "raise", "TypeError", "(", ")", "for", "i", ",", "j", "in", "enumerate", "(", "self", ".", "__scenes", ")", ":", "if", "j", ".", "scene_id", "==", "scene", ".", "scene_id", ":", "self", ".", "__scenes", "[", "i", "]", "=", "scene", "return", "self", ".", "__scenes", ".", "append", "(", "scene", ")" ]
Add scene, replace existing scene if scene with scene_id is present.
[ "Add", "scene", "replace", "existing", "scene", "if", "scene", "with", "scene_id", "is", "present", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/scenes.py#L34-L42
Julius2342/pyvlx
pyvlx/scenes.py
Scenes.load
async def load(self): """Load scenes from KLF 200.""" get_scene_list = GetSceneList(pyvlx=self.pyvlx) await get_scene_list.do_api_call() if not get_scene_list.success: raise PyVLXException("Unable to retrieve scene information") for scene in get_scene_list.scenes: self.add(Scene(pyvlx=self.pyvlx, scene_id=scene[0], name=scene[1]))
python
async def load(self): """Load scenes from KLF 200.""" get_scene_list = GetSceneList(pyvlx=self.pyvlx) await get_scene_list.do_api_call() if not get_scene_list.success: raise PyVLXException("Unable to retrieve scene information") for scene in get_scene_list.scenes: self.add(Scene(pyvlx=self.pyvlx, scene_id=scene[0], name=scene[1]))
[ "async", "def", "load", "(", "self", ")", ":", "get_scene_list", "=", "GetSceneList", "(", "pyvlx", "=", "self", ".", "pyvlx", ")", "await", "get_scene_list", ".", "do_api_call", "(", ")", "if", "not", "get_scene_list", ".", "success", ":", "raise", "PyVLXException", "(", "\"Unable to retrieve scene information\"", ")", "for", "scene", "in", "get_scene_list", ".", "scenes", ":", "self", ".", "add", "(", "Scene", "(", "pyvlx", "=", "self", ".", "pyvlx", ",", "scene_id", "=", "scene", "[", "0", "]", ",", "name", "=", "scene", "[", "1", "]", ")", ")" ]
Load scenes from KLF 200.
[ "Load", "scenes", "from", "KLF", "200", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/scenes.py#L48-L55
tbielawa/bitmath
bitmath/__init__.py
best_prefix
def best_prefix(bytes, system=NIST): """Return a bitmath instance representing the best human-readable representation of the number of bytes given by ``bytes``. In addition to a numeric type, the ``bytes`` parameter may also be a bitmath type. Optionally select a preferred unit system by specifying the ``system`` keyword. Choices for ``system`` are ``bitmath.NIST`` (default) and ``bitmath.SI``. Basically a shortcut for: >>> import bitmath >>> b = bitmath.Byte(12345) >>> best = b.best_prefix() Or: >>> import bitmath >>> best = (bitmath.KiB(12345) * 4201).best_prefix() """ if isinstance(bytes, Bitmath): value = bytes.bytes else: value = bytes return Byte(value).best_prefix(system=system)
python
def best_prefix(bytes, system=NIST): """Return a bitmath instance representing the best human-readable representation of the number of bytes given by ``bytes``. In addition to a numeric type, the ``bytes`` parameter may also be a bitmath type. Optionally select a preferred unit system by specifying the ``system`` keyword. Choices for ``system`` are ``bitmath.NIST`` (default) and ``bitmath.SI``. Basically a shortcut for: >>> import bitmath >>> b = bitmath.Byte(12345) >>> best = b.best_prefix() Or: >>> import bitmath >>> best = (bitmath.KiB(12345) * 4201).best_prefix() """ if isinstance(bytes, Bitmath): value = bytes.bytes else: value = bytes return Byte(value).best_prefix(system=system)
[ "def", "best_prefix", "(", "bytes", ",", "system", "=", "NIST", ")", ":", "if", "isinstance", "(", "bytes", ",", "Bitmath", ")", ":", "value", "=", "bytes", ".", "bytes", "else", ":", "value", "=", "bytes", "return", "Byte", "(", "value", ")", ".", "best_prefix", "(", "system", "=", "system", ")" ]
Return a bitmath instance representing the best human-readable representation of the number of bytes given by ``bytes``. In addition to a numeric type, the ``bytes`` parameter may also be a bitmath type. Optionally select a preferred unit system by specifying the ``system`` keyword. Choices for ``system`` are ``bitmath.NIST`` (default) and ``bitmath.SI``. Basically a shortcut for: >>> import bitmath >>> b = bitmath.Byte(12345) >>> best = b.best_prefix() Or: >>> import bitmath >>> best = (bitmath.KiB(12345) * 4201).best_prefix()
[ "Return", "a", "bitmath", "instance", "representing", "the", "best", "human", "-", "readable", "representation", "of", "the", "number", "of", "bytes", "given", "by", "bytes", ".", "In", "addition", "to", "a", "numeric", "type", "the", "bytes", "parameter", "may", "also", "be", "a", "bitmath", "type", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1174-L1198
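A small usage sketch of the module-level helper documented above; the byte count is arbitrary and the printed values are approximate.
import bitmath

print(bitmath.best_prefix(1234567))                     # NIST default, roughly 1.18 MiB
print(bitmath.best_prefix(1234567, system=bitmath.SI))  # SI prefixes, roughly 1.23 MB
print(bitmath.best_prefix(bitmath.KiB(12345) * 4201))   # bitmath instances are accepted too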
tbielawa/bitmath
bitmath/__init__.py
query_device_capacity
def query_device_capacity(device_fd): """Create bitmath instances of the capacity of a system block device Make one or more ioctl request to query the capacity of a block device. Perform any processing required to compute the final capacity value. Return the device capacity in bytes as a :class:`bitmath.Byte` instance. Thanks to the following resources for help figuring this out Linux/Mac ioctl's for querying block device sizes: * http://stackoverflow.com/a/12925285/263969 * http://stackoverflow.com/a/9764508/263969 :param file device_fd: A ``file`` object of the device to query the capacity of (as in ``get_device_capacity(open("/dev/sda"))``). :return: a bitmath :class:`bitmath.Byte` instance equivalent to the capacity of the target device in bytes. """ if os_name() != 'posix': raise NotImplementedError("'bitmath.query_device_capacity' is not supported on this platform: %s" % os_name()) s = os.stat(device_fd.name).st_mode if not stat.S_ISBLK(s): raise ValueError("The file descriptor provided is not of a device type") # The keys of the ``ioctl_map`` dictionary correlate to possible # values from the ``platform.system`` function. ioctl_map = { # ioctls for the "Linux" platform "Linux": { "request_params": [ # A list of parameters to calculate the block size. # # ( PARAM_NAME , FORMAT_CHAR , REQUEST_CODE ) ("BLKGETSIZE64", "L", 0x80081272) # Per <linux/fs.h>, the BLKGETSIZE64 request returns a # 'u64' sized value. This is an unsigned 64 bit # integer C type. This means to correctly "buffer" the # result we need 64 bits, or 8 bytes, of memory. # # The struct module documentation include a reference # chart relating formatting characters to native C # Types. In this case, using the "native size", the # table tells us: # # * Character 'L' - Unsigned Long C Type (u64) - Loads into a Python int type # # Confirm this character is right by running (on Linux): # # >>> import struct # >>> print 8 == struct.calcsize('L') # # The result should be true as long as your kernel # headers define BLKGETSIZE64 as a u64 type (please # file a bug report at # https://github.com/tbielawa/bitmath/issues/new if # this does *not* work for you) ], # func is how the final result is decided. Because the # Linux BLKGETSIZE64 call returns the block device # capacity in bytes as an integer value, no extra # calculations are required. Simply return the value of # BLKGETSIZE64. "func": lambda x: x["BLKGETSIZE64"] }, # ioctls for the "Darwin" (Mac OS X) platform "Darwin": { "request_params": [ # A list of parameters to calculate the block size. # # ( PARAM_NAME , FORMAT_CHAR , REQUEST_CODE ) ("DKIOCGETBLOCKCOUNT", "L", 0x40086419), # Per <sys/disk.h>: get media's block count - uint64_t # # As in the BLKGETSIZE64 example, an unsigned 64 bit # integer will use the 'L' formatting character ("DKIOCGETBLOCKSIZE", "I", 0x40046418) # Per <sys/disk.h>: get media's block size - uint32_t # # This request returns an unsigned 32 bit integer, or # in other words: just a normal integer (or 'int' c # type). That should require 4 bytes of space for # buffering. According to the struct modules # 'Formatting Characters' chart: # # * Character 'I' - Unsigned Int C Type (uint32_t) - Loads into a Python int type ], # OS X doesn't have a direct equivalent to the Linux # BLKGETSIZE64 request. Instead, we must request how many # blocks (or "sectors") are on the disk, and the size (in # bytes) of each block. 
Finally, multiply the two together # to obtain capacity: # # n Block * y Byte # capacity (bytes) = ------- # 1 Block "func": lambda x: x["DKIOCGETBLOCKCOUNT"] * x["DKIOCGETBLOCKSIZE"] # This expression simply accepts a dictionary ``x`` as a # parameter, and then returns the result of multiplying # the two named dictionary items together. In this case, # that means multiplying ``DKIOCGETBLOCKCOUNT``, the total # number of blocks, by ``DKIOCGETBLOCKSIZE``, the size of # each block in bytes. } } platform_params = ioctl_map[platform.system()] results = {} for req_name, fmt, request_code in platform_params['request_params']: # Read the systems native size (in bytes) of this format type. buffer_size = struct.calcsize(fmt) # Construct a buffer to store the ioctl result in buffer = ' ' * buffer_size # This code has been ran on only a few test systems. If it's # appropriate, maybe in the future we'll add try/except # conditions for some possible errors. Really only for cases # where it would add value to override the default exception # message string. buffer = fcntl.ioctl(device_fd.fileno(), request_code, buffer) # Unpack the raw result from the ioctl call into a familiar # python data type according to the ``fmt`` rules. result = struct.unpack(fmt, buffer)[0] # Add the new result to our collection results[req_name] = result return Byte(platform_params['func'](results))
python
def query_device_capacity(device_fd): """Create bitmath instances of the capacity of a system block device Make one or more ioctl request to query the capacity of a block device. Perform any processing required to compute the final capacity value. Return the device capacity in bytes as a :class:`bitmath.Byte` instance. Thanks to the following resources for help figuring this out Linux/Mac ioctl's for querying block device sizes: * http://stackoverflow.com/a/12925285/263969 * http://stackoverflow.com/a/9764508/263969 :param file device_fd: A ``file`` object of the device to query the capacity of (as in ``get_device_capacity(open("/dev/sda"))``). :return: a bitmath :class:`bitmath.Byte` instance equivalent to the capacity of the target device in bytes. """ if os_name() != 'posix': raise NotImplementedError("'bitmath.query_device_capacity' is not supported on this platform: %s" % os_name()) s = os.stat(device_fd.name).st_mode if not stat.S_ISBLK(s): raise ValueError("The file descriptor provided is not of a device type") # The keys of the ``ioctl_map`` dictionary correlate to possible # values from the ``platform.system`` function. ioctl_map = { # ioctls for the "Linux" platform "Linux": { "request_params": [ # A list of parameters to calculate the block size. # # ( PARAM_NAME , FORMAT_CHAR , REQUEST_CODE ) ("BLKGETSIZE64", "L", 0x80081272) # Per <linux/fs.h>, the BLKGETSIZE64 request returns a # 'u64' sized value. This is an unsigned 64 bit # integer C type. This means to correctly "buffer" the # result we need 64 bits, or 8 bytes, of memory. # # The struct module documentation include a reference # chart relating formatting characters to native C # Types. In this case, using the "native size", the # table tells us: # # * Character 'L' - Unsigned Long C Type (u64) - Loads into a Python int type # # Confirm this character is right by running (on Linux): # # >>> import struct # >>> print 8 == struct.calcsize('L') # # The result should be true as long as your kernel # headers define BLKGETSIZE64 as a u64 type (please # file a bug report at # https://github.com/tbielawa/bitmath/issues/new if # this does *not* work for you) ], # func is how the final result is decided. Because the # Linux BLKGETSIZE64 call returns the block device # capacity in bytes as an integer value, no extra # calculations are required. Simply return the value of # BLKGETSIZE64. "func": lambda x: x["BLKGETSIZE64"] }, # ioctls for the "Darwin" (Mac OS X) platform "Darwin": { "request_params": [ # A list of parameters to calculate the block size. # # ( PARAM_NAME , FORMAT_CHAR , REQUEST_CODE ) ("DKIOCGETBLOCKCOUNT", "L", 0x40086419), # Per <sys/disk.h>: get media's block count - uint64_t # # As in the BLKGETSIZE64 example, an unsigned 64 bit # integer will use the 'L' formatting character ("DKIOCGETBLOCKSIZE", "I", 0x40046418) # Per <sys/disk.h>: get media's block size - uint32_t # # This request returns an unsigned 32 bit integer, or # in other words: just a normal integer (or 'int' c # type). That should require 4 bytes of space for # buffering. According to the struct modules # 'Formatting Characters' chart: # # * Character 'I' - Unsigned Int C Type (uint32_t) - Loads into a Python int type ], # OS X doesn't have a direct equivalent to the Linux # BLKGETSIZE64 request. Instead, we must request how many # blocks (or "sectors") are on the disk, and the size (in # bytes) of each block. 
Finally, multiply the two together # to obtain capacity: # # n Block * y Byte # capacity (bytes) = ------- # 1 Block "func": lambda x: x["DKIOCGETBLOCKCOUNT"] * x["DKIOCGETBLOCKSIZE"] # This expression simply accepts a dictionary ``x`` as a # parameter, and then returns the result of multiplying # the two named dictionary items together. In this case, # that means multiplying ``DKIOCGETBLOCKCOUNT``, the total # number of blocks, by ``DKIOCGETBLOCKSIZE``, the size of # each block in bytes. } } platform_params = ioctl_map[platform.system()] results = {} for req_name, fmt, request_code in platform_params['request_params']: # Read the systems native size (in bytes) of this format type. buffer_size = struct.calcsize(fmt) # Construct a buffer to store the ioctl result in buffer = ' ' * buffer_size # This code has been ran on only a few test systems. If it's # appropriate, maybe in the future we'll add try/except # conditions for some possible errors. Really only for cases # where it would add value to override the default exception # message string. buffer = fcntl.ioctl(device_fd.fileno(), request_code, buffer) # Unpack the raw result from the ioctl call into a familiar # python data type according to the ``fmt`` rules. result = struct.unpack(fmt, buffer)[0] # Add the new result to our collection results[req_name] = result return Byte(platform_params['func'](results))
[ "def", "query_device_capacity", "(", "device_fd", ")", ":", "if", "os_name", "(", ")", "!=", "'posix'", ":", "raise", "NotImplementedError", "(", "\"'bitmath.query_device_capacity' is not supported on this platform: %s\"", "%", "os_name", "(", ")", ")", "s", "=", "os", ".", "stat", "(", "device_fd", ".", "name", ")", ".", "st_mode", "if", "not", "stat", ".", "S_ISBLK", "(", "s", ")", ":", "raise", "ValueError", "(", "\"The file descriptor provided is not of a device type\"", ")", "# The keys of the ``ioctl_map`` dictionary correlate to possible", "# values from the ``platform.system`` function.", "ioctl_map", "=", "{", "# ioctls for the \"Linux\" platform", "\"Linux\"", ":", "{", "\"request_params\"", ":", "[", "# A list of parameters to calculate the block size.", "#", "# ( PARAM_NAME , FORMAT_CHAR , REQUEST_CODE )", "(", "\"BLKGETSIZE64\"", ",", "\"L\"", ",", "0x80081272", ")", "# Per <linux/fs.h>, the BLKGETSIZE64 request returns a", "# 'u64' sized value. This is an unsigned 64 bit", "# integer C type. This means to correctly \"buffer\" the", "# result we need 64 bits, or 8 bytes, of memory.", "#", "# The struct module documentation include a reference", "# chart relating formatting characters to native C", "# Types. In this case, using the \"native size\", the", "# table tells us:", "#", "# * Character 'L' - Unsigned Long C Type (u64) - Loads into a Python int type", "#", "# Confirm this character is right by running (on Linux):", "#", "# >>> import struct", "# >>> print 8 == struct.calcsize('L')", "#", "# The result should be true as long as your kernel", "# headers define BLKGETSIZE64 as a u64 type (please", "# file a bug report at", "# https://github.com/tbielawa/bitmath/issues/new if", "# this does *not* work for you)", "]", ",", "# func is how the final result is decided. Because the", "# Linux BLKGETSIZE64 call returns the block device", "# capacity in bytes as an integer value, no extra", "# calculations are required. Simply return the value of", "# BLKGETSIZE64.", "\"func\"", ":", "lambda", "x", ":", "x", "[", "\"BLKGETSIZE64\"", "]", "}", ",", "# ioctls for the \"Darwin\" (Mac OS X) platform", "\"Darwin\"", ":", "{", "\"request_params\"", ":", "[", "# A list of parameters to calculate the block size.", "#", "# ( PARAM_NAME , FORMAT_CHAR , REQUEST_CODE )", "(", "\"DKIOCGETBLOCKCOUNT\"", ",", "\"L\"", ",", "0x40086419", ")", ",", "# Per <sys/disk.h>: get media's block count - uint64_t", "#", "# As in the BLKGETSIZE64 example, an unsigned 64 bit", "# integer will use the 'L' formatting character", "(", "\"DKIOCGETBLOCKSIZE\"", ",", "\"I\"", ",", "0x40046418", ")", "# Per <sys/disk.h>: get media's block size - uint32_t", "#", "# This request returns an unsigned 32 bit integer, or", "# in other words: just a normal integer (or 'int' c", "# type). That should require 4 bytes of space for", "# buffering. According to the struct modules", "# 'Formatting Characters' chart:", "#", "# * Character 'I' - Unsigned Int C Type (uint32_t) - Loads into a Python int type", "]", ",", "# OS X doesn't have a direct equivalent to the Linux", "# BLKGETSIZE64 request. Instead, we must request how many", "# blocks (or \"sectors\") are on the disk, and the size (in", "# bytes) of each block. 
Finally, multiply the two together", "# to obtain capacity:", "#", "# n Block * y Byte", "# capacity (bytes) = -------", "# 1 Block", "\"func\"", ":", "lambda", "x", ":", "x", "[", "\"DKIOCGETBLOCKCOUNT\"", "]", "*", "x", "[", "\"DKIOCGETBLOCKSIZE\"", "]", "# This expression simply accepts a dictionary ``x`` as a", "# parameter, and then returns the result of multiplying", "# the two named dictionary items together. In this case,", "# that means multiplying ``DKIOCGETBLOCKCOUNT``, the total", "# number of blocks, by ``DKIOCGETBLOCKSIZE``, the size of", "# each block in bytes.", "}", "}", "platform_params", "=", "ioctl_map", "[", "platform", ".", "system", "(", ")", "]", "results", "=", "{", "}", "for", "req_name", ",", "fmt", ",", "request_code", "in", "platform_params", "[", "'request_params'", "]", ":", "# Read the systems native size (in bytes) of this format type.", "buffer_size", "=", "struct", ".", "calcsize", "(", "fmt", ")", "# Construct a buffer to store the ioctl result in", "buffer", "=", "' '", "*", "buffer_size", "# This code has been ran on only a few test systems. If it's", "# appropriate, maybe in the future we'll add try/except", "# conditions for some possible errors. Really only for cases", "# where it would add value to override the default exception", "# message string.", "buffer", "=", "fcntl", ".", "ioctl", "(", "device_fd", ".", "fileno", "(", ")", ",", "request_code", ",", "buffer", ")", "# Unpack the raw result from the ioctl call into a familiar", "# python data type according to the ``fmt`` rules.", "result", "=", "struct", ".", "unpack", "(", "fmt", ",", "buffer", ")", "[", "0", "]", "# Add the new result to our collection", "results", "[", "req_name", "]", "=", "result", "return", "Byte", "(", "platform_params", "[", "'func'", "]", "(", "results", ")", ")" ]
Create bitmath instances of the capacity of a system block device Make one or more ioctl request to query the capacity of a block device. Perform any processing required to compute the final capacity value. Return the device capacity in bytes as a :class:`bitmath.Byte` instance. Thanks to the following resources for help figuring this out Linux/Mac ioctl's for querying block device sizes: * http://stackoverflow.com/a/12925285/263969 * http://stackoverflow.com/a/9764508/263969 :param file device_fd: A ``file`` object of the device to query the capacity of (as in ``get_device_capacity(open("/dev/sda"))``). :return: a bitmath :class:`bitmath.Byte` instance equivalent to the capacity of the target device in bytes.
[ "Create", "bitmath", "instances", "of", "the", "capacity", "of", "a", "system", "block", "device" ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1201-L1331
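A hedged usage sketch for the block-device query above; /dev/sda is only an example path, the call is POSIX-only, and opening a raw device normally requires elevated privileges.
import bitmath

# Assumes /dev/sda exists and the process is allowed to open it.
with open("/dev/sda", "rb") as device:
    capacity = bitmath.query_device_capacity(device)
print(capacity.best_prefix())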
tbielawa/bitmath
bitmath/__init__.py
getsize
def getsize(path, bestprefix=True, system=NIST): """Return a bitmath instance in the best human-readable representation of the file size at `path`. Optionally, provide a preferred unit system by setting `system` to either `bitmath.NIST` (default) or `bitmath.SI`. Optionally, set ``bestprefix`` to ``False`` to get ``bitmath.Byte`` instances back. """ _path = os.path.realpath(path) size_bytes = os.path.getsize(_path) if bestprefix: return Byte(size_bytes).best_prefix(system=system) else: return Byte(size_bytes)
python
def getsize(path, bestprefix=True, system=NIST): """Return a bitmath instance in the best human-readable representation of the file size at `path`. Optionally, provide a preferred unit system by setting `system` to either `bitmath.NIST` (default) or `bitmath.SI`. Optionally, set ``bestprefix`` to ``False`` to get ``bitmath.Byte`` instances back. """ _path = os.path.realpath(path) size_bytes = os.path.getsize(_path) if bestprefix: return Byte(size_bytes).best_prefix(system=system) else: return Byte(size_bytes)
[ "def", "getsize", "(", "path", ",", "bestprefix", "=", "True", ",", "system", "=", "NIST", ")", ":", "_path", "=", "os", ".", "path", ".", "realpath", "(", "path", ")", "size_bytes", "=", "os", ".", "path", ".", "getsize", "(", "_path", ")", "if", "bestprefix", ":", "return", "Byte", "(", "size_bytes", ")", ".", "best_prefix", "(", "system", "=", "system", ")", "else", ":", "return", "Byte", "(", "size_bytes", ")" ]
Return a bitmath instance in the best human-readable representation of the file size at `path`. Optionally, provide a preferred unit system by setting `system` to either `bitmath.NIST` (default) or `bitmath.SI`. Optionally, set ``bestprefix`` to ``False`` to get ``bitmath.Byte`` instances back.
[ "Return", "a", "bitmath", "instance", "in", "the", "best", "human", "-", "readable", "representation", "of", "the", "file", "size", "at", "path", ".", "Optionally", "provide", "a", "preferred", "unit", "system", "by", "setting", "system", "to", "either", "bitmath", ".", "NIST", "(", "default", ")", "or", "bitmath", ".", "SI", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1334-L1348
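A short example of getsize() as documented above; the path is illustrative.
import bitmath

print(bitmath.getsize("/etc/hosts"))                     # best human-readable prefix
print(bitmath.getsize("/etc/hosts", bestprefix=False))   # always a Byte instance
print(bitmath.getsize("/etc/hosts", system=bitmath.SI))  # prefer SI units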
tbielawa/bitmath
bitmath/__init__.py
listdir
def listdir(search_base, followlinks=False, filter='*', relpath=False, bestprefix=False, system=NIST): """This is a generator which recurses the directory tree `search_base`, yielding 2-tuples of: * The absolute/relative path to a discovered file * A bitmath instance representing the "apparent size" of the file. - `search_base` - The directory to begin walking down. - `followlinks` - Whether or not to follow symbolic links to directories - `filter` - A glob (see :py:mod:`fnmatch`) to filter results with (default: ``*``, everything) - `relpath` - ``True`` to return the relative path from `pwd` or ``False`` (default) to return the fully qualified path - ``bestprefix`` - set to ``False`` to get ``bitmath.Byte`` instances back instead. - `system` - Provide a preferred unit system by setting `system` to either ``bitmath.NIST`` (default) or ``bitmath.SI``. .. note:: This function does NOT return tuples for directory entities. .. note:: Symlinks to **files** are followed automatically """ for root, dirs, files in os.walk(search_base, followlinks=followlinks): for name in fnmatch.filter(files, filter): _path = os.path.join(root, name) if relpath: # RELATIVE path _return_path = os.path.relpath(_path, '.') else: # REAL path _return_path = os.path.realpath(_path) if followlinks: yield (_return_path, getsize(_path, bestprefix=bestprefix, system=system)) else: if os.path.isdir(_path) or os.path.islink(_path): pass else: yield (_return_path, getsize(_path, bestprefix=bestprefix, system=system))
python
def listdir(search_base, followlinks=False, filter='*', relpath=False, bestprefix=False, system=NIST): """This is a generator which recurses the directory tree `search_base`, yielding 2-tuples of: * The absolute/relative path to a discovered file * A bitmath instance representing the "apparent size" of the file. - `search_base` - The directory to begin walking down. - `followlinks` - Whether or not to follow symbolic links to directories - `filter` - A glob (see :py:mod:`fnmatch`) to filter results with (default: ``*``, everything) - `relpath` - ``True`` to return the relative path from `pwd` or ``False`` (default) to return the fully qualified path - ``bestprefix`` - set to ``False`` to get ``bitmath.Byte`` instances back instead. - `system` - Provide a preferred unit system by setting `system` to either ``bitmath.NIST`` (default) or ``bitmath.SI``. .. note:: This function does NOT return tuples for directory entities. .. note:: Symlinks to **files** are followed automatically """ for root, dirs, files in os.walk(search_base, followlinks=followlinks): for name in fnmatch.filter(files, filter): _path = os.path.join(root, name) if relpath: # RELATIVE path _return_path = os.path.relpath(_path, '.') else: # REAL path _return_path = os.path.realpath(_path) if followlinks: yield (_return_path, getsize(_path, bestprefix=bestprefix, system=system)) else: if os.path.isdir(_path) or os.path.islink(_path): pass else: yield (_return_path, getsize(_path, bestprefix=bestprefix, system=system))
[ "def", "listdir", "(", "search_base", ",", "followlinks", "=", "False", ",", "filter", "=", "'*'", ",", "relpath", "=", "False", ",", "bestprefix", "=", "False", ",", "system", "=", "NIST", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "search_base", ",", "followlinks", "=", "followlinks", ")", ":", "for", "name", "in", "fnmatch", ".", "filter", "(", "files", ",", "filter", ")", ":", "_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", "if", "relpath", ":", "# RELATIVE path", "_return_path", "=", "os", ".", "path", ".", "relpath", "(", "_path", ",", "'.'", ")", "else", ":", "# REAL path", "_return_path", "=", "os", ".", "path", ".", "realpath", "(", "_path", ")", "if", "followlinks", ":", "yield", "(", "_return_path", ",", "getsize", "(", "_path", ",", "bestprefix", "=", "bestprefix", ",", "system", "=", "system", ")", ")", "else", ":", "if", "os", ".", "path", ".", "isdir", "(", "_path", ")", "or", "os", ".", "path", ".", "islink", "(", "_path", ")", ":", "pass", "else", ":", "yield", "(", "_return_path", ",", "getsize", "(", "_path", ",", "bestprefix", "=", "bestprefix", ",", "system", "=", "system", ")", ")" ]
This is a generator which recurses the directory tree `search_base`, yielding 2-tuples of: * The absolute/relative path to a discovered file * A bitmath instance representing the "apparent size" of the file. - `search_base` - The directory to begin walking down. - `followlinks` - Whether or not to follow symbolic links to directories - `filter` - A glob (see :py:mod:`fnmatch`) to filter results with (default: ``*``, everything) - `relpath` - ``True`` to return the relative path from `pwd` or ``False`` (default) to return the fully qualified path - ``bestprefix`` - set to ``False`` to get ``bitmath.Byte`` instances back instead. - `system` - Provide a preferred unit system by setting `system` to either ``bitmath.NIST`` (default) or ``bitmath.SI``. .. note:: This function does NOT return tuples for directory entities. .. note:: Symlinks to **files** are followed automatically
[ "This", "is", "a", "generator", "which", "recurses", "the", "directory", "tree", "search_base", "yielding", "2", "-", "tuples", "of", ":" ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1351-L1391
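A sketch of consuming the generator above; the search directory and glob pattern are illustrative.
import bitmath

# Walk the current directory for Python files and print their apparent sizes.
for path, size in bitmath.listdir(".", filter="*.py", relpath=True):
    print(path, size.best_prefix())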
tbielawa/bitmath
bitmath/__init__.py
parse_string
def parse_string(s): """Parse a string with units and try to make a bitmath object out of it. String inputs may include whitespace characters between the value and the unit. """ # Strings only please if not isinstance(s, (str, unicode)): raise ValueError("parse_string only accepts string inputs but a %s was given" % type(s)) # get the index of the first alphabetic character try: index = list([i.isalpha() for i in s]).index(True) except ValueError: # If there's no alphabetic characters we won't be able to .index(True) raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s) # split the string into the value and the unit val, unit = s[:index], s[index:] # see if the unit exists as a type in our namespace if unit == "b": unit_class = Bit elif unit == "B": unit_class = Byte else: if not (hasattr(sys.modules[__name__], unit) and isinstance(getattr(sys.modules[__name__], unit), type)): raise ValueError("The unit %s is not a valid bitmath unit" % unit) unit_class = globals()[unit] try: val = float(val) except ValueError: raise try: return unit_class(val) except: # pragma: no cover raise ValueError("Can't parse string %s into a bitmath object" % s)
python
def parse_string(s): """Parse a string with units and try to make a bitmath object out of it. String inputs may include whitespace characters between the value and the unit. """ # Strings only please if not isinstance(s, (str, unicode)): raise ValueError("parse_string only accepts string inputs but a %s was given" % type(s)) # get the index of the first alphabetic character try: index = list([i.isalpha() for i in s]).index(True) except ValueError: # If there's no alphabetic characters we won't be able to .index(True) raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s) # split the string into the value and the unit val, unit = s[:index], s[index:] # see if the unit exists as a type in our namespace if unit == "b": unit_class = Bit elif unit == "B": unit_class = Byte else: if not (hasattr(sys.modules[__name__], unit) and isinstance(getattr(sys.modules[__name__], unit), type)): raise ValueError("The unit %s is not a valid bitmath unit" % unit) unit_class = globals()[unit] try: val = float(val) except ValueError: raise try: return unit_class(val) except: # pragma: no cover raise ValueError("Can't parse string %s into a bitmath object" % s)
[ "def", "parse_string", "(", "s", ")", ":", "# Strings only please", "if", "not", "isinstance", "(", "s", ",", "(", "str", ",", "unicode", ")", ")", ":", "raise", "ValueError", "(", "\"parse_string only accepts string inputs but a %s was given\"", "%", "type", "(", "s", ")", ")", "# get the index of the first alphabetic character", "try", ":", "index", "=", "list", "(", "[", "i", ".", "isalpha", "(", ")", "for", "i", "in", "s", "]", ")", ".", "index", "(", "True", ")", "except", "ValueError", ":", "# If there's no alphabetic characters we won't be able to .index(True)", "raise", "ValueError", "(", "\"No unit detected, can not parse string '%s' into a bitmath object\"", "%", "s", ")", "# split the string into the value and the unit", "val", ",", "unit", "=", "s", "[", ":", "index", "]", ",", "s", "[", "index", ":", "]", "# see if the unit exists as a type in our namespace", "if", "unit", "==", "\"b\"", ":", "unit_class", "=", "Bit", "elif", "unit", "==", "\"B\"", ":", "unit_class", "=", "Byte", "else", ":", "if", "not", "(", "hasattr", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "unit", ")", "and", "isinstance", "(", "getattr", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "unit", ")", ",", "type", ")", ")", ":", "raise", "ValueError", "(", "\"The unit %s is not a valid bitmath unit\"", "%", "unit", ")", "unit_class", "=", "globals", "(", ")", "[", "unit", "]", "try", ":", "val", "=", "float", "(", "val", ")", "except", "ValueError", ":", "raise", "try", ":", "return", "unit_class", "(", "val", ")", "except", ":", "# pragma: no cover", "raise", "ValueError", "(", "\"Can't parse string %s into a bitmath object\"", "%", "s", ")" ]
Parse a string with units and try to make a bitmath object out of it. String inputs may include whitespace characters between the value and the unit.
[ "Parse", "a", "string", "with", "units", "and", "try", "to", "make", "a", "bitmath", "object", "out", "of", "it", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1394-L1434
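A couple of examples of the strict parser above; note that an input without any unit raises ValueError.
import bitmath

print(bitmath.parse_string("1024 KiB"))   # -> KiB(1024.0); whitespace before the unit is allowed
print(bitmath.parse_string("2.7GB"))      # -> GB(2.7)
# bitmath.parse_string("42") raises ValueError because no unit is detected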
tbielawa/bitmath
bitmath/__init__.py
parse_string_unsafe
def parse_string_unsafe(s, system=SI): """Attempt to parse a string with ambiguous units and try to make a bitmath object out of it. This may produce inaccurate results if parsing shell output. For example `ls` may say a 2730 Byte file is '2.7K'. 2730 Bytes == 2.73 kB ~= 2.666 KiB. See the documentation for all of the important details. Note the following caveats: * All inputs are assumed to be byte-based (as opposed to bit based) * Numerical inputs (those without any units) are assumed to be a number of bytes * Inputs with single letter units (k, M, G, etc) are assumed to be SI units (base-10). Set the `system` parameter to `bitmath.NIST` to change this behavior. * Inputs with an `i` character following the leading letter (Ki, Mi, Gi) are assumed to be NIST units (base 2) * Capitalization does not matter """ if not isinstance(s, (str, unicode)) and \ not isinstance(s, numbers.Number): raise ValueError("parse_string_unsafe only accepts string/number inputs but a %s was given" % type(s)) ###################################################################### # Is the input simple to parse? Just a number, or a number # masquerading as a string perhaps? # Test case: raw number input (easy!) if isinstance(s, numbers.Number): # It's just a number. Assume bytes return Byte(s) # Test case: a number pretending to be a string if isinstance(s, (str, unicode)): try: # Can we turn it directly into a number? return Byte(float(s)) except ValueError: # Nope, this is not a plain number pass ###################################################################### # At this point: # - the input is also not just a number wrapped in a string # - nor is is just a plain number type # # We need to do some more digging around now to figure out exactly # what we were given and possibly normalize the input into a # format we can recognize. # First we'll separate the number and the unit. # # Get the index of the first alphabetic character try: index = list([i.isalpha() for i in s]).index(True) except ValueError: # pragma: no cover # If there's no alphabetic characters we won't be able to .index(True) raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s) # Split the string into the value and the unit val, unit = s[:index], s[index:] # Don't trust anything. We'll make sure the correct 'b' is in place. unit = unit.rstrip('Bb') unit += 'B' # At this point we can expect `unit` to be either: # # - 2 Characters (for SI, ex: kB or GB) # - 3 Caracters (so NIST, ex: KiB, or GiB) # # A unit with any other number of chars is not a valid unit # SI if len(unit) == 2: # Has NIST parsing been requested? if system == NIST: # NIST units requested. Ensure the unit begins with a # capital letter and is followed by an 'i' character. 
unit = capitalize_first(unit) # Insert an 'i' char after the first letter _unit = list(unit) _unit.insert(1, 'i') # Collapse the list back into a 3 letter string unit = ''.join(_unit) unit_class = globals()[unit] else: # Default parsing (SI format) # # Edge-case checking: SI 'thousand' is a lower-case K if unit.startswith('K'): unit = unit.replace('K', 'k') elif not unit.startswith('k'): # Otherwise, ensure the first char is capitalized unit = capitalize_first(unit) # This is an SI-type unit if unit[0] in SI_PREFIXES: unit_class = globals()[unit] # NIST elif len(unit) == 3: unit = capitalize_first(unit) # This is a NIST-type unit if unit[:2] in NIST_PREFIXES: unit_class = globals()[unit] else: # This is not a unit we recognize raise ValueError("The unit %s is not a valid bitmath unit" % unit) try: unit_class except UnboundLocalError: raise ValueError("The unit %s is not a valid bitmath unit" % unit) return unit_class(float(val))
python
def parse_string_unsafe(s, system=SI): """Attempt to parse a string with ambiguous units and try to make a bitmath object out of it. This may produce inaccurate results if parsing shell output. For example `ls` may say a 2730 Byte file is '2.7K'. 2730 Bytes == 2.73 kB ~= 2.666 KiB. See the documentation for all of the important details. Note the following caveats: * All inputs are assumed to be byte-based (as opposed to bit based) * Numerical inputs (those without any units) are assumed to be a number of bytes * Inputs with single letter units (k, M, G, etc) are assumed to be SI units (base-10). Set the `system` parameter to `bitmath.NIST` to change this behavior. * Inputs with an `i` character following the leading letter (Ki, Mi, Gi) are assumed to be NIST units (base 2) * Capitalization does not matter """ if not isinstance(s, (str, unicode)) and \ not isinstance(s, numbers.Number): raise ValueError("parse_string_unsafe only accepts string/number inputs but a %s was given" % type(s)) ###################################################################### # Is the input simple to parse? Just a number, or a number # masquerading as a string perhaps? # Test case: raw number input (easy!) if isinstance(s, numbers.Number): # It's just a number. Assume bytes return Byte(s) # Test case: a number pretending to be a string if isinstance(s, (str, unicode)): try: # Can we turn it directly into a number? return Byte(float(s)) except ValueError: # Nope, this is not a plain number pass ###################################################################### # At this point: # - the input is also not just a number wrapped in a string # - nor is is just a plain number type # # We need to do some more digging around now to figure out exactly # what we were given and possibly normalize the input into a # format we can recognize. # First we'll separate the number and the unit. # # Get the index of the first alphabetic character try: index = list([i.isalpha() for i in s]).index(True) except ValueError: # pragma: no cover # If there's no alphabetic characters we won't be able to .index(True) raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s) # Split the string into the value and the unit val, unit = s[:index], s[index:] # Don't trust anything. We'll make sure the correct 'b' is in place. unit = unit.rstrip('Bb') unit += 'B' # At this point we can expect `unit` to be either: # # - 2 Characters (for SI, ex: kB or GB) # - 3 Caracters (so NIST, ex: KiB, or GiB) # # A unit with any other number of chars is not a valid unit # SI if len(unit) == 2: # Has NIST parsing been requested? if system == NIST: # NIST units requested. Ensure the unit begins with a # capital letter and is followed by an 'i' character. 
unit = capitalize_first(unit) # Insert an 'i' char after the first letter _unit = list(unit) _unit.insert(1, 'i') # Collapse the list back into a 3 letter string unit = ''.join(_unit) unit_class = globals()[unit] else: # Default parsing (SI format) # # Edge-case checking: SI 'thousand' is a lower-case K if unit.startswith('K'): unit = unit.replace('K', 'k') elif not unit.startswith('k'): # Otherwise, ensure the first char is capitalized unit = capitalize_first(unit) # This is an SI-type unit if unit[0] in SI_PREFIXES: unit_class = globals()[unit] # NIST elif len(unit) == 3: unit = capitalize_first(unit) # This is a NIST-type unit if unit[:2] in NIST_PREFIXES: unit_class = globals()[unit] else: # This is not a unit we recognize raise ValueError("The unit %s is not a valid bitmath unit" % unit) try: unit_class except UnboundLocalError: raise ValueError("The unit %s is not a valid bitmath unit" % unit) return unit_class(float(val))
[ "def", "parse_string_unsafe", "(", "s", ",", "system", "=", "SI", ")", ":", "if", "not", "isinstance", "(", "s", ",", "(", "str", ",", "unicode", ")", ")", "and", "not", "isinstance", "(", "s", ",", "numbers", ".", "Number", ")", ":", "raise", "ValueError", "(", "\"parse_string_unsafe only accepts string/number inputs but a %s was given\"", "%", "type", "(", "s", ")", ")", "######################################################################", "# Is the input simple to parse? Just a number, or a number", "# masquerading as a string perhaps?", "# Test case: raw number input (easy!)", "if", "isinstance", "(", "s", ",", "numbers", ".", "Number", ")", ":", "# It's just a number. Assume bytes", "return", "Byte", "(", "s", ")", "# Test case: a number pretending to be a string", "if", "isinstance", "(", "s", ",", "(", "str", ",", "unicode", ")", ")", ":", "try", ":", "# Can we turn it directly into a number?", "return", "Byte", "(", "float", "(", "s", ")", ")", "except", "ValueError", ":", "# Nope, this is not a plain number", "pass", "######################################################################", "# At this point:", "# - the input is also not just a number wrapped in a string", "# - nor is is just a plain number type", "#", "# We need to do some more digging around now to figure out exactly", "# what we were given and possibly normalize the input into a", "# format we can recognize.", "# First we'll separate the number and the unit.", "#", "# Get the index of the first alphabetic character", "try", ":", "index", "=", "list", "(", "[", "i", ".", "isalpha", "(", ")", "for", "i", "in", "s", "]", ")", ".", "index", "(", "True", ")", "except", "ValueError", ":", "# pragma: no cover", "# If there's no alphabetic characters we won't be able to .index(True)", "raise", "ValueError", "(", "\"No unit detected, can not parse string '%s' into a bitmath object\"", "%", "s", ")", "# Split the string into the value and the unit", "val", ",", "unit", "=", "s", "[", ":", "index", "]", ",", "s", "[", "index", ":", "]", "# Don't trust anything. We'll make sure the correct 'b' is in place.", "unit", "=", "unit", ".", "rstrip", "(", "'Bb'", ")", "unit", "+=", "'B'", "# At this point we can expect `unit` to be either:", "#", "# - 2 Characters (for SI, ex: kB or GB)", "# - 3 Caracters (so NIST, ex: KiB, or GiB)", "#", "# A unit with any other number of chars is not a valid unit", "# SI", "if", "len", "(", "unit", ")", "==", "2", ":", "# Has NIST parsing been requested?", "if", "system", "==", "NIST", ":", "# NIST units requested. 
Ensure the unit begins with a", "# capital letter and is followed by an 'i' character.", "unit", "=", "capitalize_first", "(", "unit", ")", "# Insert an 'i' char after the first letter", "_unit", "=", "list", "(", "unit", ")", "_unit", ".", "insert", "(", "1", ",", "'i'", ")", "# Collapse the list back into a 3 letter string", "unit", "=", "''", ".", "join", "(", "_unit", ")", "unit_class", "=", "globals", "(", ")", "[", "unit", "]", "else", ":", "# Default parsing (SI format)", "#", "# Edge-case checking: SI 'thousand' is a lower-case K", "if", "unit", ".", "startswith", "(", "'K'", ")", ":", "unit", "=", "unit", ".", "replace", "(", "'K'", ",", "'k'", ")", "elif", "not", "unit", ".", "startswith", "(", "'k'", ")", ":", "# Otherwise, ensure the first char is capitalized", "unit", "=", "capitalize_first", "(", "unit", ")", "# This is an SI-type unit", "if", "unit", "[", "0", "]", "in", "SI_PREFIXES", ":", "unit_class", "=", "globals", "(", ")", "[", "unit", "]", "# NIST", "elif", "len", "(", "unit", ")", "==", "3", ":", "unit", "=", "capitalize_first", "(", "unit", ")", "# This is a NIST-type unit", "if", "unit", "[", ":", "2", "]", "in", "NIST_PREFIXES", ":", "unit_class", "=", "globals", "(", ")", "[", "unit", "]", "else", ":", "# This is not a unit we recognize", "raise", "ValueError", "(", "\"The unit %s is not a valid bitmath unit\"", "%", "unit", ")", "try", ":", "unit_class", "except", "UnboundLocalError", ":", "raise", "ValueError", "(", "\"The unit %s is not a valid bitmath unit\"", "%", "unit", ")", "return", "unit_class", "(", "float", "(", "val", ")", ")" ]
Attempt to parse a string with ambiguous units and try to make a bitmath object out of it. This may produce inaccurate results if parsing shell output. For example `ls` may say a 2730 Byte file is '2.7K'. 2730 Bytes == 2.73 kB ~= 2.666 KiB. See the documentation for all of the important details. Note the following caveats: * All inputs are assumed to be byte-based (as opposed to bit based) * Numerical inputs (those without any units) are assumed to be a number of bytes * Inputs with single letter units (k, M, G, etc) are assumed to be SI units (base-10). Set the `system` parameter to `bitmath.NIST` to change this behavior. * Inputs with an `i` character following the leading letter (Ki, Mi, Gi) are assumed to be NIST units (base 2) * Capitalization does not matter
[ "Attempt", "to", "parse", "a", "string", "with", "ambiguous", "units", "and", "try", "to", "make", "a", "bitmath", "object", "out", "of", "it", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1437-L1559
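A few examples of the permissive parser above, showing the SI default and the NIST override described in the docstring; the values are arbitrary.
import bitmath

print(bitmath.parse_string_unsafe("2.7K"))                       # -> kB(2.7), SI assumed by default
print(bitmath.parse_string_unsafe("2.7K", system=bitmath.NIST))  # -> KiB(2.7)
print(bitmath.parse_string_unsafe(2730))                         # bare numbers are treated as Byte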
tbielawa/bitmath
bitmath/__init__.py
format
def format(fmt_str=None, plural=False, bestprefix=False): """Context manager for printing bitmath instances. ``fmt_str`` - a formatting mini-language compat formatting string. See the @properties (above) for a list of available items. ``plural`` - True enables printing instances with 's's if they're plural. False (default) prints them as singular (no trailing 's'). ``bestprefix`` - True enables printing instances in their best human-readable representation. False, the default, prints instances using their current prefix unit. """ if 'bitmath' not in globals(): import bitmath if plural: orig_fmt_plural = bitmath.format_plural bitmath.format_plural = True if fmt_str: orig_fmt_str = bitmath.format_string bitmath.format_string = fmt_str yield if plural: bitmath.format_plural = orig_fmt_plural if fmt_str: bitmath.format_string = orig_fmt_str
python
def format(fmt_str=None, plural=False, bestprefix=False): """Context manager for printing bitmath instances. ``fmt_str`` - a formatting mini-language compat formatting string. See the @properties (above) for a list of available items. ``plural`` - True enables printing instances with 's's if they're plural. False (default) prints them as singular (no trailing 's'). ``bestprefix`` - True enables printing instances in their best human-readable representation. False, the default, prints instances using their current prefix unit. """ if 'bitmath' not in globals(): import bitmath if plural: orig_fmt_plural = bitmath.format_plural bitmath.format_plural = True if fmt_str: orig_fmt_str = bitmath.format_string bitmath.format_string = fmt_str yield if plural: bitmath.format_plural = orig_fmt_plural if fmt_str: bitmath.format_string = orig_fmt_str
[ "def", "format", "(", "fmt_str", "=", "None", ",", "plural", "=", "False", ",", "bestprefix", "=", "False", ")", ":", "if", "'bitmath'", "not", "in", "globals", "(", ")", ":", "import", "bitmath", "if", "plural", ":", "orig_fmt_plural", "=", "bitmath", ".", "format_plural", "bitmath", ".", "format_plural", "=", "True", "if", "fmt_str", ":", "orig_fmt_str", "=", "bitmath", ".", "format_string", "bitmath", ".", "format_string", "=", "fmt_str", "yield", "if", "plural", ":", "bitmath", ".", "format_plural", "=", "orig_fmt_plural", "if", "fmt_str", ":", "bitmath", ".", "format_string", "=", "orig_fmt_str" ]
Context manager for printing bitmath instances. ``fmt_str`` - a formatting mini-language compat formatting string. See the @properties (above) for a list of available items. ``plural`` - True enables printing instances with 's's if they're plural. False (default) prints them as singular (no trailing 's'). ``bestprefix`` - True enables printing instances in their best human-readable representation. False, the default, prints instances using their current prefix unit.
[ "Context", "manager", "for", "printing", "bitmath", "instances", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1565-L1595
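A brief sketch of the context manager above; the format string is just an example, and the module-level settings revert once the block exits.
import bitmath

with bitmath.format(fmt_str="{value:.3f} {unit}"):
    print(bitmath.KiB(1.0).to_MiB())   # rendered with three decimal places inside the block
print(bitmath.KiB(1.0).to_MiB())       # default formatting again outside the block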
tbielawa/bitmath
bitmath/__init__.py
cli_script_main
def cli_script_main(cli_args): """ A command line interface to basic bitmath operations. """ choices = ALL_UNIT_TYPES parser = argparse.ArgumentParser( description='Converts from one type of size to another.') parser.add_argument('--from-stdin', default=False, action='store_true', help='Reads number from stdin rather than the cli') parser.add_argument( '-f', '--from', choices=choices, nargs=1, type=str, dest='fromunit', default=['Byte'], help='Input type you are converting from. Defaultes to Byte.') parser.add_argument( '-t', '--to', choices=choices, required=False, nargs=1, type=str, help=('Input type you are converting to. ' 'Attempts to detect best result if omitted.'), dest='tounit') parser.add_argument( 'size', nargs='*', type=float, help='The number to convert.') args = parser.parse_args(cli_args) # Not sure how to cover this with tests, or if the functionality # will remain in this form long enough for it to make writing a # test worth the effort. if args.from_stdin: # pragma: no cover args.size = [float(sys.stdin.readline()[:-1])] results = [] for size in args.size: instance = getattr(__import__( 'bitmath', fromlist=['True']), args.fromunit[0])(size) # If we have a unit provided then use it if args.tounit: result = getattr(instance, args.tounit[0]) # Otherwise use the best_prefix call else: result = instance.best_prefix() results.append(result) return results
python
def cli_script_main(cli_args): """ A command line interface to basic bitmath operations. """ choices = ALL_UNIT_TYPES parser = argparse.ArgumentParser( description='Converts from one type of size to another.') parser.add_argument('--from-stdin', default=False, action='store_true', help='Reads number from stdin rather than the cli') parser.add_argument( '-f', '--from', choices=choices, nargs=1, type=str, dest='fromunit', default=['Byte'], help='Input type you are converting from. Defaultes to Byte.') parser.add_argument( '-t', '--to', choices=choices, required=False, nargs=1, type=str, help=('Input type you are converting to. ' 'Attempts to detect best result if omitted.'), dest='tounit') parser.add_argument( 'size', nargs='*', type=float, help='The number to convert.') args = parser.parse_args(cli_args) # Not sure how to cover this with tests, or if the functionality # will remain in this form long enough for it to make writing a # test worth the effort. if args.from_stdin: # pragma: no cover args.size = [float(sys.stdin.readline()[:-1])] results = [] for size in args.size: instance = getattr(__import__( 'bitmath', fromlist=['True']), args.fromunit[0])(size) # If we have a unit provided then use it if args.tounit: result = getattr(instance, args.tounit[0]) # Otherwise use the best_prefix call else: result = instance.best_prefix() results.append(result) return results
[ "def", "cli_script_main", "(", "cli_args", ")", ":", "choices", "=", "ALL_UNIT_TYPES", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Converts from one type of size to another.'", ")", "parser", ".", "add_argument", "(", "'--from-stdin'", ",", "default", "=", "False", ",", "action", "=", "'store_true'", ",", "help", "=", "'Reads number from stdin rather than the cli'", ")", "parser", ".", "add_argument", "(", "'-f'", ",", "'--from'", ",", "choices", "=", "choices", ",", "nargs", "=", "1", ",", "type", "=", "str", ",", "dest", "=", "'fromunit'", ",", "default", "=", "[", "'Byte'", "]", ",", "help", "=", "'Input type you are converting from. Defaultes to Byte.'", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "'--to'", ",", "choices", "=", "choices", ",", "required", "=", "False", ",", "nargs", "=", "1", ",", "type", "=", "str", ",", "help", "=", "(", "'Input type you are converting to. '", "'Attempts to detect best result if omitted.'", ")", ",", "dest", "=", "'tounit'", ")", "parser", ".", "add_argument", "(", "'size'", ",", "nargs", "=", "'*'", ",", "type", "=", "float", ",", "help", "=", "'The number to convert.'", ")", "args", "=", "parser", ".", "parse_args", "(", "cli_args", ")", "# Not sure how to cover this with tests, or if the functionality", "# will remain in this form long enough for it to make writing a", "# test worth the effort.", "if", "args", ".", "from_stdin", ":", "# pragma: no cover", "args", ".", "size", "=", "[", "float", "(", "sys", ".", "stdin", ".", "readline", "(", ")", "[", ":", "-", "1", "]", ")", "]", "results", "=", "[", "]", "for", "size", "in", "args", ".", "size", ":", "instance", "=", "getattr", "(", "__import__", "(", "'bitmath'", ",", "fromlist", "=", "[", "'True'", "]", ")", ",", "args", ".", "fromunit", "[", "0", "]", ")", "(", "size", ")", "# If we have a unit provided then use it", "if", "args", ".", "tounit", ":", "result", "=", "getattr", "(", "instance", ",", "args", ".", "tounit", "[", "0", "]", ")", "# Otherwise use the best_prefix call", "else", ":", "result", "=", "instance", ".", "best_prefix", "(", ")", "results", ".", "append", "(", "result", ")", "return", "results" ]
A command line interface to basic bitmath operations.
[ "A", "command", "line", "interface", "to", "basic", "bitmath", "operations", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1598-L1643
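To see the argument handling above end to end, a minimal sketch assuming bitmath is installed (cli_script_main lives in bitmath/__init__.py, so it is importable from the top-level package):

    import bitmath

    # Explicit target unit: 1536 MiB expressed as GiB.
    print(bitmath.cli_script_main(['--from', 'MiB', '--to', 'GiB', '1536']))   # expected [GiB(1.5)]

    # No --to given: best_prefix() picks the representation.
    print(bitmath.cli_script_main(['--from', 'MiB', '3072']))                  # expected [GiB(3.0)]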
tbielawa/bitmath
bitmath/__init__.py
Bitmath._do_setup
def _do_setup(self): """Setup basic parameters for this class. `base` is the numeric base which when raised to `power` is equivalent to 1 unit of the corresponding prefix. I.e., base=2, power=10 represents 2^10, which is the NIST Binary Prefix for 1 Kibibyte. Likewise, for the SI prefix classes `base` will be 10, and the `power` for the Kilobyte is 3. """ (self._base, self._power, self._name_singular, self._name_plural) = self._setup() self._unit_value = self._base ** self._power
python
def _do_setup(self): """Setup basic parameters for this class. `base` is the numeric base which when raised to `power` is equivalent to 1 unit of the corresponding prefix. I.e., base=2, power=10 represents 2^10, which is the NIST Binary Prefix for 1 Kibibyte. Likewise, for the SI prefix classes `base` will be 10, and the `power` for the Kilobyte is 3. """ (self._base, self._power, self._name_singular, self._name_plural) = self._setup() self._unit_value = self._base ** self._power
[ "def", "_do_setup", "(", "self", ")", ":", "(", "self", ".", "_base", ",", "self", ".", "_power", ",", "self", ".", "_name_singular", ",", "self", ".", "_name_plural", ")", "=", "self", ".", "_setup", "(", ")", "self", ".", "_unit_value", "=", "self", ".", "_base", "**", "self", ".", "_power" ]
Setup basic parameters for this class. `base` is the numeric base which when raised to `power` is equivalent to 1 unit of the corresponding prefix. I.e., base=2, power=10 represents 2^10, which is the NIST Binary Prefix for 1 Kibibyte. Likewise, for the SI prefix classes `base` will be 10, and the `power` for the Kilobyte is 3.
[ "Setup", "basic", "parameters", "for", "this", "class", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L239-L250
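The base/power bookkeeping set up here is visible through public properties on any instance — a small sketch, assuming bitmath is installed:

    import bitmath

    k = bitmath.KiB(1)    # NIST/binary prefix
    m = bitmath.MB(1)     # SI/decimal prefix
    print(k.base, k.power, k.base ** k.power)   # 2 10 1024
    print(m.base, m.power, m.base ** m.power)   # 10 6 1000000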
tbielawa/bitmath
bitmath/__init__.py
Bitmath._norm
def _norm(self, value): """Normalize the input value into the fundamental unit for this prefix type. :param number value: The input value to be normalized :raises ValueError: if the input value is not a type of real number """ if isinstance(value, self.valid_types): self._byte_value = value * self._unit_value self._bit_value = self._byte_value * 8.0 else: raise ValueError("Initialization value '%s' is of an invalid type: %s. " "Must be one of %s" % ( value, type(value), ", ".join(str(x) for x in self.valid_types)))
python
def _norm(self, value): """Normalize the input value into the fundamental unit for this prefix type. :param number value: The input value to be normalized :raises ValueError: if the input value is not a type of real number """ if isinstance(value, self.valid_types): self._byte_value = value * self._unit_value self._bit_value = self._byte_value * 8.0 else: raise ValueError("Initialization value '%s' is of an invalid type: %s. " "Must be one of %s" % ( value, type(value), ", ".join(str(x) for x in self.valid_types)))
[ "def", "_norm", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "self", ".", "valid_types", ")", ":", "self", ".", "_byte_value", "=", "value", "*", "self", ".", "_unit_value", "self", ".", "_bit_value", "=", "self", ".", "_byte_value", "*", "8.0", "else", ":", "raise", "ValueError", "(", "\"Initialization value '%s' is of an invalid type: %s. \"", "\"Must be one of %s\"", "%", "(", "value", ",", "type", "(", "value", ")", ",", "\", \"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "self", ".", "valid_types", ")", ")", ")" ]
Normalize the input value into the fundamental unit for this prefix type. :param number value: The input value to be normalized :raises ValueError: if the input value is not a type of real number
[ "Normalize", "the", "input", "value", "into", "the", "fundamental", "unit", "for", "this", "prefix", "type", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L252-L267
tbielawa/bitmath
bitmath/__init__.py
Bitmath.system
def system(self): """The system of units used to measure an instance""" if self._base == 2: return "NIST" elif self._base == 10: return "SI" else: # I don't expect to ever encounter this logic branch, but # hey, it's better to have extra test coverage than # insufficient test coverage. raise ValueError("Instances mathematical base is an unsupported value: %s" % ( str(self._base)))
python
def system(self): """The system of units used to measure an instance""" if self._base == 2: return "NIST" elif self._base == 10: return "SI" else: # I don't expect to ever encounter this logic branch, but # hey, it's better to have extra test coverage than # insufficient test coverage. raise ValueError("Instances mathematical base is an unsupported value: %s" % ( str(self._base)))
[ "def", "system", "(", "self", ")", ":", "if", "self", ".", "_base", "==", "2", ":", "return", "\"NIST\"", "elif", "self", ".", "_base", "==", "10", ":", "return", "\"SI\"", "else", ":", "# I don't expect to ever encounter this logic branch, but", "# hey, it's better to have extra test coverage than", "# insufficient test coverage.", "raise", "ValueError", "(", "\"Instances mathematical base is an unsupported value: %s\"", "%", "(", "str", "(", "self", ".", "_base", ")", ")", ")" ]
The system of units used to measure an instance
[ "The", "system", "of", "units", "used", "to", "measure", "an", "instance" ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L299-L310
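In other words, the prefix system is inferred from the mathematical base — for example:

    import bitmath

    print(bitmath.GiB(1).system)   # 'NIST' (base 2)
    print(bitmath.GB(1).system)    # 'SI'   (base 10)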
tbielawa/bitmath
bitmath/__init__.py
Bitmath.unit
def unit(self): """The string that is this instances prefix unit name in agreement with this instance value (singular or plural). Following the convention that only 1 is singular. This will always be the singular form when :attr:`bitmath.format_plural` is ``False`` (default value). For example: >>> KiB(1).unit == 'KiB' >>> Byte(0).unit == 'Bytes' >>> Byte(1).unit == 'Byte' >>> Byte(1.1).unit == 'Bytes' >>> Gb(2).unit == 'Gbs' """ global format_plural if self.prefix_value == 1: # If it's a '1', return it singular, no matter what return self._name_singular elif format_plural: # Pluralization requested return self._name_plural else: # Pluralization NOT requested, and the value is not 1 return self._name_singular
python
def unit(self): """The string that is this instances prefix unit name in agreement with this instance value (singular or plural). Following the convention that only 1 is singular. This will always be the singular form when :attr:`bitmath.format_plural` is ``False`` (default value). For example: >>> KiB(1).unit == 'KiB' >>> Byte(0).unit == 'Bytes' >>> Byte(1).unit == 'Byte' >>> Byte(1.1).unit == 'Bytes' >>> Gb(2).unit == 'Gbs' """ global format_plural if self.prefix_value == 1: # If it's a '1', return it singular, no matter what return self._name_singular elif format_plural: # Pluralization requested return self._name_plural else: # Pluralization NOT requested, and the value is not 1 return self._name_singular
[ "def", "unit", "(", "self", ")", ":", "global", "format_plural", "if", "self", ".", "prefix_value", "==", "1", ":", "# If it's a '1', return it singular, no matter what", "return", "self", ".", "_name_singular", "elif", "format_plural", ":", "# Pluralization requested", "return", "self", ".", "_name_plural", "else", ":", "# Pluralization NOT requested, and the value is not 1", "return", "self", ".", "_name_singular" ]
The string that is this instances prefix unit name in agreement with this instance value (singular or plural). Following the convention that only 1 is singular. This will always be the singular form when :attr:`bitmath.format_plural` is ``False`` (default value). For example: >>> KiB(1).unit == 'KiB' >>> Byte(0).unit == 'Bytes' >>> Byte(1).unit == 'Byte' >>> Byte(1.1).unit == 'Bytes' >>> Gb(2).unit == 'Gbs'
[ "The", "string", "that", "is", "this", "instances", "prefix", "unit", "name", "in", "agreement", "with", "this", "instance", "value", "(", "singular", "or", "plural", ")", ".", "Following", "the", "convention", "that", "only", "1", "is", "singular", ".", "This", "will", "always", "be", "the", "singular", "form", "when", ":", "attr", ":", "bitmath", ".", "format_plural", "is", "False", "(", "default", "value", ")", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L313-L338
tbielawa/bitmath
bitmath/__init__.py
Bitmath.from_other
def from_other(cls, item): """Factory function to return instances of `item` converted into a new instance of ``cls``. Because this is a class method, it may be called from any bitmath class object without the need to explicitly instantiate the class ahead of time. *Implicit Parameter:* * ``cls`` A bitmath class, implicitly set to the class of the instance object it is called on *User Supplied Parameter:* * ``item`` A :class:`bitmath.Bitmath` subclass instance *Example:* >>> import bitmath >>> kib = bitmath.KiB.from_other(bitmath.MiB(1)) >>> print kib KiB(1024.0) """ if isinstance(item, Bitmath): return cls(bits=item.bits) else: raise ValueError("The provided items must be a valid bitmath class: %s" % str(item.__class__))
python
def from_other(cls, item): """Factory function to return instances of `item` converted into a new instance of ``cls``. Because this is a class method, it may be called from any bitmath class object without the need to explicitly instantiate the class ahead of time. *Implicit Parameter:* * ``cls`` A bitmath class, implicitly set to the class of the instance object it is called on *User Supplied Parameter:* * ``item`` A :class:`bitmath.Bitmath` subclass instance *Example:* >>> import bitmath >>> kib = bitmath.KiB.from_other(bitmath.MiB(1)) >>> print kib KiB(1024.0) """ if isinstance(item, Bitmath): return cls(bits=item.bits) else: raise ValueError("The provided items must be a valid bitmath class: %s" % str(item.__class__))
[ "def", "from_other", "(", "cls", ",", "item", ")", ":", "if", "isinstance", "(", "item", ",", "Bitmath", ")", ":", "return", "cls", "(", "bits", "=", "item", ".", "bits", ")", "else", ":", "raise", "ValueError", "(", "\"The provided items must be a valid bitmath class: %s\"", "%", "str", "(", "item", ".", "__class__", ")", ")" ]
Factory function to return instances of `item` converted into a new instance of ``cls``. Because this is a class method, it may be called from any bitmath class object without the need to explicitly instantiate the class ahead of time. *Implicit Parameter:* * ``cls`` A bitmath class, implicitly set to the class of the instance object it is called on *User Supplied Parameter:* * ``item`` A :class:`bitmath.Bitmath` subclass instance *Example:* >>> import bitmath >>> kib = bitmath.KiB.from_other(bitmath.MiB(1)) >>> print kib KiB(1024.0)
[ "Factory", "function", "to", "return", "instances", "of", "item", "converted", "into", "a", "new", "instance", "of", "cls", ".", "Because", "this", "is", "a", "class", "method", "it", "may", "be", "called", "from", "any", "bitmath", "class", "object", "without", "the", "need", "to", "explicitly", "instantiate", "the", "class", "ahead", "of", "time", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L371-L398
tbielawa/bitmath
bitmath/__init__.py
Bitmath.format
def format(self, fmt): """Return a representation of this instance formatted with user supplied syntax""" _fmt_params = { 'base': self.base, 'bin': self.bin, 'binary': self.binary, 'bits': self.bits, 'bytes': self.bytes, 'power': self.power, 'system': self.system, 'unit': self.unit, 'unit_plural': self.unit_plural, 'unit_singular': self.unit_singular, 'value': self.value } return fmt.format(**_fmt_params)
python
def format(self, fmt): """Return a representation of this instance formatted with user supplied syntax""" _fmt_params = { 'base': self.base, 'bin': self.bin, 'binary': self.binary, 'bits': self.bits, 'bytes': self.bytes, 'power': self.power, 'system': self.system, 'unit': self.unit, 'unit_plural': self.unit_plural, 'unit_singular': self.unit_singular, 'value': self.value } return fmt.format(**_fmt_params)
[ "def", "format", "(", "self", ",", "fmt", ")", ":", "_fmt_params", "=", "{", "'base'", ":", "self", ".", "base", ",", "'bin'", ":", "self", ".", "bin", ",", "'binary'", ":", "self", ".", "binary", ",", "'bits'", ":", "self", ".", "bits", ",", "'bytes'", ":", "self", ".", "bytes", ",", "'power'", ":", "self", ".", "power", ",", "'system'", ":", "self", ".", "system", ",", "'unit'", ":", "self", ".", "unit", ",", "'unit_plural'", ":", "self", ".", "unit_plural", ",", "'unit_singular'", ":", "self", ".", "unit_singular", ",", "'value'", ":", "self", ".", "value", "}", "return", "fmt", ".", "format", "(", "*", "*", "_fmt_params", ")" ]
Return a representation of this instance formatted with user supplied syntax
[ "Return", "a", "representation", "of", "this", "instance", "formatted", "with", "user", "supplied", "syntax" ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L416-L433
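A short sketch of the formatting mini-language this feeds (assuming bitmath is installed); any of the keys in _fmt_params can appear in the format string:

    import bitmath

    leftover = bitmath.kB(754)
    print(leftover.format("{value:.2f} {unit_singular} = {bits} bits ({system} system)"))
    # expected: 754.00 kB = 6032000.0 bits (SI system)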
tbielawa/bitmath
bitmath/__init__.py
Bitmath.best_prefix
def best_prefix(self, system=None): """Optional parameter, `system`, allows you to prefer NIST or SI in the results. By default, the current system is used (Bit/Byte default to NIST). Logic discussion/notes: Base-case, does it need converting? If the instance is less than one Byte, return the instance as a Bit instance. Else, begin by recording the unit system the instance is defined by. This determines which steps (NIST_STEPS/SI_STEPS) we iterate over. If the instance is not already a ``Byte`` instance, convert it to one. NIST units step up by powers of 1024, SI units step up by powers of 1000. Take integer value of the log(base=STEP_POWER) of the instance's byte value. E.g.: >>> int(math.log(Gb(100).bytes, 1000)) 3 This will return a value >= 0. The following determines the 'best prefix unit' for representation: * result == 0, best represented as a Byte * result >= len(SYSTEM_STEPS), best represented as an Exbi/Exabyte * 0 < result < len(SYSTEM_STEPS), best represented as SYSTEM_PREFIXES[result-1] """ # Use absolute value so we don't return Bit's for *everything* # less than Byte(1). From github issue #55 if abs(self) < Byte(1): return Bit.from_other(self) else: if type(self) is Byte: # pylint: disable=unidiomatic-typecheck _inst = self else: _inst = Byte.from_other(self) # Which table to consult? Was a preferred system provided? if system is None: # No preference. Use existing system if self.system == 'NIST': _STEPS = NIST_PREFIXES _BASE = 1024 elif self.system == 'SI': _STEPS = SI_PREFIXES _BASE = 1000 # Anything else would have raised by now else: # Preferred system provided. if system == NIST: _STEPS = NIST_PREFIXES _BASE = 1024 elif system == SI: _STEPS = SI_PREFIXES _BASE = 1000 else: raise ValueError("Invalid value given for 'system' parameter." " Must be one of NIST or SI") # Index of the string of the best prefix in the STEPS list _index = int(math.log(abs(_inst.bytes), _BASE)) # Recall that the log() function returns >= 0. This doesn't # map to the STEPS list 1:1. That is to say, 0 is handled with # special care. So if the _index is 1, we actually want item 0 # in the list. if _index == 0: # Already a Byte() type, so return it. return _inst elif _index >= len(_STEPS): # This is a really big number. Use the biggest prefix we've got _best_prefix = _STEPS[-1] elif 0 < _index < len(_STEPS): # There is an appropriate prefix unit to represent this _best_prefix = _STEPS[_index - 1] _conversion_method = getattr( self, 'to_%sB' % _best_prefix) return _conversion_method()
python
def best_prefix(self, system=None): """Optional parameter, `system`, allows you to prefer NIST or SI in the results. By default, the current system is used (Bit/Byte default to NIST). Logic discussion/notes: Base-case, does it need converting? If the instance is less than one Byte, return the instance as a Bit instance. Else, begin by recording the unit system the instance is defined by. This determines which steps (NIST_STEPS/SI_STEPS) we iterate over. If the instance is not already a ``Byte`` instance, convert it to one. NIST units step up by powers of 1024, SI units step up by powers of 1000. Take integer value of the log(base=STEP_POWER) of the instance's byte value. E.g.: >>> int(math.log(Gb(100).bytes, 1000)) 3 This will return a value >= 0. The following determines the 'best prefix unit' for representation: * result == 0, best represented as a Byte * result >= len(SYSTEM_STEPS), best represented as an Exbi/Exabyte * 0 < result < len(SYSTEM_STEPS), best represented as SYSTEM_PREFIXES[result-1] """ # Use absolute value so we don't return Bit's for *everything* # less than Byte(1). From github issue #55 if abs(self) < Byte(1): return Bit.from_other(self) else: if type(self) is Byte: # pylint: disable=unidiomatic-typecheck _inst = self else: _inst = Byte.from_other(self) # Which table to consult? Was a preferred system provided? if system is None: # No preference. Use existing system if self.system == 'NIST': _STEPS = NIST_PREFIXES _BASE = 1024 elif self.system == 'SI': _STEPS = SI_PREFIXES _BASE = 1000 # Anything else would have raised by now else: # Preferred system provided. if system == NIST: _STEPS = NIST_PREFIXES _BASE = 1024 elif system == SI: _STEPS = SI_PREFIXES _BASE = 1000 else: raise ValueError("Invalid value given for 'system' parameter." " Must be one of NIST or SI") # Index of the string of the best prefix in the STEPS list _index = int(math.log(abs(_inst.bytes), _BASE)) # Recall that the log() function returns >= 0. This doesn't # map to the STEPS list 1:1. That is to say, 0 is handled with # special care. So if the _index is 1, we actually want item 0 # in the list. if _index == 0: # Already a Byte() type, so return it. return _inst elif _index >= len(_STEPS): # This is a really big number. Use the biggest prefix we've got _best_prefix = _STEPS[-1] elif 0 < _index < len(_STEPS): # There is an appropriate prefix unit to represent this _best_prefix = _STEPS[_index - 1] _conversion_method = getattr( self, 'to_%sB' % _best_prefix) return _conversion_method()
[ "def", "best_prefix", "(", "self", ",", "system", "=", "None", ")", ":", "# Use absolute value so we don't return Bit's for *everything*", "# less than Byte(1). From github issue #55", "if", "abs", "(", "self", ")", "<", "Byte", "(", "1", ")", ":", "return", "Bit", ".", "from_other", "(", "self", ")", "else", ":", "if", "type", "(", "self", ")", "is", "Byte", ":", "# pylint: disable=unidiomatic-typecheck", "_inst", "=", "self", "else", ":", "_inst", "=", "Byte", ".", "from_other", "(", "self", ")", "# Which table to consult? Was a preferred system provided?", "if", "system", "is", "None", ":", "# No preference. Use existing system", "if", "self", ".", "system", "==", "'NIST'", ":", "_STEPS", "=", "NIST_PREFIXES", "_BASE", "=", "1024", "elif", "self", ".", "system", "==", "'SI'", ":", "_STEPS", "=", "SI_PREFIXES", "_BASE", "=", "1000", "# Anything else would have raised by now", "else", ":", "# Preferred system provided.", "if", "system", "==", "NIST", ":", "_STEPS", "=", "NIST_PREFIXES", "_BASE", "=", "1024", "elif", "system", "==", "SI", ":", "_STEPS", "=", "SI_PREFIXES", "_BASE", "=", "1000", "else", ":", "raise", "ValueError", "(", "\"Invalid value given for 'system' parameter.\"", "\" Must be one of NIST or SI\"", ")", "# Index of the string of the best prefix in the STEPS list", "_index", "=", "int", "(", "math", ".", "log", "(", "abs", "(", "_inst", ".", "bytes", ")", ",", "_BASE", ")", ")", "# Recall that the log() function returns >= 0. This doesn't", "# map to the STEPS list 1:1. That is to say, 0 is handled with", "# special care. So if the _index is 1, we actually want item 0", "# in the list.", "if", "_index", "==", "0", ":", "# Already a Byte() type, so return it.", "return", "_inst", "elif", "_index", ">=", "len", "(", "_STEPS", ")", ":", "# This is a really big number. Use the biggest prefix we've got", "_best_prefix", "=", "_STEPS", "[", "-", "1", "]", "elif", "0", "<", "_index", "<", "len", "(", "_STEPS", ")", ":", "# There is an appropriate prefix unit to represent this", "_best_prefix", "=", "_STEPS", "[", "_index", "-", "1", "]", "_conversion_method", "=", "getattr", "(", "self", ",", "'to_%sB'", "%", "_best_prefix", ")", "return", "_conversion_method", "(", ")" ]
Optional parameter, `system`, allows you to prefer NIST or SI in the results. By default, the current system is used (Bit/Byte default to NIST). Logic discussion/notes: Base-case, does it need converting? If the instance is less than one Byte, return the instance as a Bit instance. Else, begin by recording the unit system the instance is defined by. This determines which steps (NIST_STEPS/SI_STEPS) we iterate over. If the instance is not already a ``Byte`` instance, convert it to one. NIST units step up by powers of 1024, SI units step up by powers of 1000. Take integer value of the log(base=STEP_POWER) of the instance's byte value. E.g.: >>> int(math.log(Gb(100).bytes, 1000)) 3 This will return a value >= 0. The following determines the 'best prefix unit' for representation: * result == 0, best represented as a Byte * result >= len(SYSTEM_STEPS), best represented as an Exbi/Exabyte * 0 < result < len(SYSTEM_STEPS), best represented as SYSTEM_PREFIXES[result-1]
[ "Optional", "parameter", "system", "allows", "you", "to", "prefer", "NIST", "or", "SI", "in", "the", "results", ".", "By", "default", "the", "current", "system", "is", "used", "(", "Bit", "/", "Byte", "default", "to", "NIST", ")", "." ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L439-L528
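A few concrete calls, deliberately chosen away from exact power-of-base boundaries (assuming bitmath is installed):

    import bitmath

    print(bitmath.KiB(1536000).best_prefix())                # expected GiB(1.46484375)
    print(bitmath.Byte(0.5).best_prefix())                   # sub-byte values come back as Bit(4.0)
    print(bitmath.MiB(2500).best_prefix(system=bitmath.SI))  # expected GB(2.62144)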
tbielawa/bitmath
bitmath/__init__.py
Bit._norm
def _norm(self, value): """Normalize the input value into the fundamental unit for this prefix type""" self._bit_value = value * self._unit_value self._byte_value = self._bit_value / 8.0
python
def _norm(self, value): """Normalize the input value into the fundamental unit for this prefix type""" self._bit_value = value * self._unit_value self._byte_value = self._bit_value / 8.0
[ "def", "_norm", "(", "self", ",", "value", ")", ":", "self", ".", "_bit_value", "=", "value", "*", "self", ".", "_unit_value", "self", ".", "_byte_value", "=", "self", ".", "_bit_value", "/", "8.0" ]
Normalize the input value into the fundamental unit for this prefix type
[ "Normalize", "the", "input", "value", "into", "the", "fundamental", "unit", "for", "this", "prefix", "type" ]
train
https://github.com/tbielawa/bitmath/blob/58ad3ac5f076cc6e53f36a91af055c6028c850a5/bitmath/__init__.py#L1091-L1095
Julius2342/pyvlx
pyvlx/frames/frame_helper.py
calc_crc
def calc_crc(raw): """Calculate cyclic redundancy check (CRC).""" crc = 0 for sym in raw: crc = crc ^ int(sym) return crc
python
def calc_crc(raw): """Calculate cyclic redundancy check (CRC).""" crc = 0 for sym in raw: crc = crc ^ int(sym) return crc
[ "def", "calc_crc", "(", "raw", ")", ":", "crc", "=", "0", "for", "sym", "in", "raw", ":", "crc", "=", "crc", "^", "int", "(", "sym", ")", "return", "crc" ]
Calculate cyclic redundancy check (CRC).
[ "Calculate", "cyclic", "redundancy", "check", "(", "CRC", ")", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_helper.py#L6-L11
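The checksum is simply a running XOR over the raw frame bytes. A tiny sketch, assuming pyvlx is installed and the module path matches the repository layout above:

    from pyvlx.frames.frame_helper import calc_crc

    frame_body = bytes([0x00, 0x03, 0x20, 0x01])
    print(hex(calc_crc(frame_body)))   # 0x22 == 0x00 ^ 0x03 ^ 0x20 ^ 0x01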
Julius2342/pyvlx
pyvlx/frames/frame_helper.py
extract_from_frame
def extract_from_frame(data): """Extract payload and command from frame.""" if len(data) <= 4: raise PyVLXException("could_not_extract_from_frame_too_short", data=data) length = data[0] * 256 + data[1] - 1 if len(data) != length + 3: raise PyVLXException("could_not_extract_from_frame_invalid_length", data=data, current_length=len(data), expected_length=length + 3) if calc_crc(data[:-1]) != data[-1]: raise PyVLXException("could_not_extract_from_frame_invalid_crc", data=data, expected_crc=calc_crc(data[:-1]), current_crc=data[-1]) payload = data[4:-1] try: command = Command(data[2] * 256 + data[3]) except ValueError: raise PyVLXException("could_not_extract_from_frame_command", data=data) return command, payload
python
def extract_from_frame(data): """Extract payload and command from frame.""" if len(data) <= 4: raise PyVLXException("could_not_extract_from_frame_too_short", data=data) length = data[0] * 256 + data[1] - 1 if len(data) != length + 3: raise PyVLXException("could_not_extract_from_frame_invalid_length", data=data, current_length=len(data), expected_length=length + 3) if calc_crc(data[:-1]) != data[-1]: raise PyVLXException("could_not_extract_from_frame_invalid_crc", data=data, expected_crc=calc_crc(data[:-1]), current_crc=data[-1]) payload = data[4:-1] try: command = Command(data[2] * 256 + data[3]) except ValueError: raise PyVLXException("could_not_extract_from_frame_command", data=data) return command, payload
[ "def", "extract_from_frame", "(", "data", ")", ":", "if", "len", "(", "data", ")", "<=", "4", ":", "raise", "PyVLXException", "(", "\"could_not_extract_from_frame_too_short\"", ",", "data", "=", "data", ")", "length", "=", "data", "[", "0", "]", "*", "256", "+", "data", "[", "1", "]", "-", "1", "if", "len", "(", "data", ")", "!=", "length", "+", "3", ":", "raise", "PyVLXException", "(", "\"could_not_extract_from_frame_invalid_length\"", ",", "data", "=", "data", ",", "current_length", "=", "len", "(", "data", ")", ",", "expected_length", "=", "length", "+", "3", ")", "if", "calc_crc", "(", "data", "[", ":", "-", "1", "]", ")", "!=", "data", "[", "-", "1", "]", ":", "raise", "PyVLXException", "(", "\"could_not_extract_from_frame_invalid_crc\"", ",", "data", "=", "data", ",", "expected_crc", "=", "calc_crc", "(", "data", "[", ":", "-", "1", "]", ")", ",", "current_crc", "=", "data", "[", "-", "1", "]", ")", "payload", "=", "data", "[", "4", ":", "-", "1", "]", "try", ":", "command", "=", "Command", "(", "data", "[", "2", "]", "*", "256", "+", "data", "[", "3", "]", ")", "except", "ValueError", ":", "raise", "PyVLXException", "(", "\"could_not_extract_from_frame_command\"", ",", "data", "=", "data", ")", "return", "command", ",", "payload" ]
Extract payload and command from frame.
[ "Extract", "payload", "and", "command", "from", "frame", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_helper.py#L14-L28
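To make the length and CRC checks concrete, here is a standalone sketch that lays a frame out the way extract_from_frame() expects it (2-byte length, 2-byte command word, payload, XOR checksum). The command word 0x0200 is a made-up placeholder: the real function would additionally require it to be a member of pyvlx's Command enum, so this sketch only exercises the framing arithmetic, not the enum lookup:

    def xor_crc(raw):
        crc = 0
        for sym in raw:
            crc ^= int(sym)
        return crc

    command_word = 0x0200                 # hypothetical value, not a known Command member
    payload = bytes([0x01, 0x02])
    body = bytes([command_word >> 8, command_word & 0xFF]) + payload
    length_field = len(body) + 1          # counts command + payload + CRC byte
    frame = bytes([length_field >> 8, length_field & 0xFF]) + body
    frame += bytes([xor_crc(frame)])

    # The two structural checks performed by extract_from_frame():
    assert len(frame) == (frame[0] * 256 + frame[1] - 1) + 3
    assert xor_crc(frame[:-1]) == frame[-1]
    print(frame.hex())                    # the payload would be frame[4:-1]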
Julius2342/pyvlx
pyvlx/frames/frame_node_information_changed.py
FrameNodeInformationChangedNotification.get_payload
def get_payload(self): """Return Payload.""" payload = bytes([self.node_id]) payload += string_to_bytes(self.name, 64) payload += bytes([self.order >> 8 & 255, self.order & 255]) payload += bytes([self.placement]) payload += bytes([self.node_variation.value]) return payload
python
def get_payload(self): """Return Payload.""" payload = bytes([self.node_id]) payload += string_to_bytes(self.name, 64) payload += bytes([self.order >> 8 & 255, self.order & 255]) payload += bytes([self.placement]) payload += bytes([self.node_variation.value]) return payload
[ "def", "get_payload", "(", "self", ")", ":", "payload", "=", "bytes", "(", "[", "self", ".", "node_id", "]", ")", "payload", "+=", "string_to_bytes", "(", "self", ".", "name", ",", "64", ")", "payload", "+=", "bytes", "(", "[", "self", ".", "order", ">>", "8", "&", "255", ",", "self", ".", "order", "&", "255", "]", ")", "payload", "+=", "bytes", "(", "[", "self", ".", "placement", "]", ")", "payload", "+=", "bytes", "(", "[", "self", ".", "node_variation", ".", "value", "]", ")", "return", "payload" ]
Return Payload.
[ "Return", "Payload", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_node_information_changed.py#L22-L29
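The two-byte, big-endian split of order round-trips with the decoding in the next record's from_payload():

    order = 5000
    hi, lo = order >> 8 & 255, order & 255
    print(hi, lo)                   # 19 136
    print(hi * 256 + lo == order)   # True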
Julius2342/pyvlx
pyvlx/frames/frame_node_information_changed.py
FrameNodeInformationChangedNotification.from_payload
def from_payload(self, payload): """Init frame from binary data.""" self.node_id = payload[0] self.name = bytes_to_string(payload[1:65]) self.order = payload[65] * 256 + payload[66] self.placement = payload[67] self.node_variation = NodeVariation(payload[68])
python
def from_payload(self, payload): """Init frame from binary data.""" self.node_id = payload[0] self.name = bytes_to_string(payload[1:65]) self.order = payload[65] * 256 + payload[66] self.placement = payload[67] self.node_variation = NodeVariation(payload[68])
[ "def", "from_payload", "(", "self", ",", "payload", ")", ":", "self", ".", "node_id", "=", "payload", "[", "0", "]", "self", ".", "name", "=", "bytes_to_string", "(", "payload", "[", "1", ":", "65", "]", ")", "self", ".", "order", "=", "payload", "[", "65", "]", "*", "256", "+", "payload", "[", "66", "]", "self", ".", "placement", "=", "payload", "[", "67", "]", "self", ".", "node_variation", "=", "NodeVariation", "(", "payload", "[", "68", "]", ")" ]
Init frame from binary data.
[ "Init", "frame", "from", "binary", "data", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_node_information_changed.py#L31-L37
Julius2342/pyvlx
pyvlx/frames/frame_get_node_information.py
FrameGetNodeInformationConfirmation.from_payload
def from_payload(self, payload): """Init frame from binary data.""" self.status = NodeInformationStatus(payload[0]) self.node_id = payload[1]
python
def from_payload(self, payload): """Init frame from binary data.""" self.status = NodeInformationStatus(payload[0]) self.node_id = payload[1]
[ "def", "from_payload", "(", "self", ",", "payload", ")", ":", "self", ".", "status", "=", "NodeInformationStatus", "(", "payload", "[", "0", "]", ")", "self", ".", "node_id", "=", "payload", "[", "1", "]" ]
Init frame from binary data.
[ "Init", "frame", "from", "binary", "data", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/frames/frame_get_node_information.py#L60-L63
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._get_meta
def _get_meta(obj): """Extract metadata, if any, from given object.""" if hasattr(obj, 'meta'): # Spectrum or model meta = deepcopy(obj.meta) elif isinstance(obj, dict): # Metadata meta = deepcopy(obj) else: # Number meta = {} return meta
python
def _get_meta(obj): """Extract metadata, if any, from given object.""" if hasattr(obj, 'meta'): # Spectrum or model meta = deepcopy(obj.meta) elif isinstance(obj, dict): # Metadata meta = deepcopy(obj) else: # Number meta = {} return meta
[ "def", "_get_meta", "(", "obj", ")", ":", "if", "hasattr", "(", "obj", ",", "'meta'", ")", ":", "# Spectrum or model", "meta", "=", "deepcopy", "(", "obj", ".", "meta", ")", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "# Metadata", "meta", "=", "deepcopy", "(", "obj", ")", "else", ":", "# Number", "meta", "=", "{", "}", "return", "meta" ]
Extract metadata, if any, from given object.
[ "Extract", "metadata", "if", "any", "from", "given", "object", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L193-L201
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._merge_meta
def _merge_meta(left, right, result, clean=True): """Merge metadata from left and right onto results. This is used during class initialization. This should also be used by operators to merge metadata after creating a new instance but before returning it. Result's metadata is modified in-place. Parameters ---------- left, right : number, `BaseSpectrum`, or `~astropy.modeling.models` Inputs of an operation. result : `BaseSpectrum` Output spectrum object. clean : bool Remove ``'header'`` and ``'expr'`` entries from inputs. """ # Copies are returned because they need some clean-up below. left = BaseSpectrum._get_meta(left) right = BaseSpectrum._get_meta(right) # Remove these from going into result to avoid mess. # header = FITS header metadata # expr = ASTROLIB PYSYNPHOT expression if clean: for key in ('header', 'expr'): for d in (left, right): if key in d: del d[key] mid = metadata.merge(left, right, metadata_conflicts='silent') result.meta = metadata.merge(result.meta, mid, metadata_conflicts='silent')
python
def _merge_meta(left, right, result, clean=True): """Merge metadata from left and right onto results. This is used during class initialization. This should also be used by operators to merge metadata after creating a new instance but before returning it. Result's metadata is modified in-place. Parameters ---------- left, right : number, `BaseSpectrum`, or `~astropy.modeling.models` Inputs of an operation. result : `BaseSpectrum` Output spectrum object. clean : bool Remove ``'header'`` and ``'expr'`` entries from inputs. """ # Copies are returned because they need some clean-up below. left = BaseSpectrum._get_meta(left) right = BaseSpectrum._get_meta(right) # Remove these from going into result to avoid mess. # header = FITS header metadata # expr = ASTROLIB PYSYNPHOT expression if clean: for key in ('header', 'expr'): for d in (left, right): if key in d: del d[key] mid = metadata.merge(left, right, metadata_conflicts='silent') result.meta = metadata.merge(result.meta, mid, metadata_conflicts='silent')
[ "def", "_merge_meta", "(", "left", ",", "right", ",", "result", ",", "clean", "=", "True", ")", ":", "# Copies are returned because they need some clean-up below.", "left", "=", "BaseSpectrum", ".", "_get_meta", "(", "left", ")", "right", "=", "BaseSpectrum", ".", "_get_meta", "(", "right", ")", "# Remove these from going into result to avoid mess.", "# header = FITS header metadata", "# expr = ASTROLIB PYSYNPHOT expression", "if", "clean", ":", "for", "key", "in", "(", "'header'", ",", "'expr'", ")", ":", "for", "d", "in", "(", "left", ",", "right", ")", ":", "if", "key", "in", "d", ":", "del", "d", "[", "key", "]", "mid", "=", "metadata", ".", "merge", "(", "left", ",", "right", ",", "metadata_conflicts", "=", "'silent'", ")", "result", ".", "meta", "=", "metadata", ".", "merge", "(", "result", ".", "meta", ",", "mid", ",", "metadata_conflicts", "=", "'silent'", ")" ]
Merge metadata from left and right onto results. This is used during class initialization. This should also be used by operators to merge metadata after creating a new instance but before returning it. Result's metadata is modified in-place. Parameters ---------- left, right : number, `BaseSpectrum`, or `~astropy.modeling.models` Inputs of an operation. result : `BaseSpectrum` Output spectrum object. clean : bool Remove ``'header'`` and ``'expr'`` entries from inputs.
[ "Merge", "metadata", "from", "left", "and", "right", "onto", "results", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L204-L239
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._process_generic_param
def _process_generic_param(pval, def_unit, equivalencies=[]): """Process generic model parameter.""" if isinstance(pval, u.Quantity): outval = pval.to(def_unit, equivalencies).value else: # Assume already in desired unit outval = pval return outval
python
def _process_generic_param(pval, def_unit, equivalencies=[]): """Process generic model parameter.""" if isinstance(pval, u.Quantity): outval = pval.to(def_unit, equivalencies).value else: # Assume already in desired unit outval = pval return outval
[ "def", "_process_generic_param", "(", "pval", ",", "def_unit", ",", "equivalencies", "=", "[", "]", ")", ":", "if", "isinstance", "(", "pval", ",", "u", ".", "Quantity", ")", ":", "outval", "=", "pval", ".", "to", "(", "def_unit", ",", "equivalencies", ")", ".", "value", "else", ":", "# Assume already in desired unit", "outval", "=", "pval", "return", "outval" ]
Process generic model parameter.
[ "Process", "generic", "model", "parameter", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L242-L248
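What the Quantity branch does, restated with plain astropy (a sketch assuming astropy is installed); a bare number would be passed through unchanged on the assumption that it is already in def_unit:

    from astropy import units as u

    pval = 500.0 * u.nm
    print(pval.to(u.AA, u.spectral()).value)   # 5000.0 -- converted to Angstrom, unit stripped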
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._process_wave_param
def _process_wave_param(self, pval): """Process individual model parameter representing wavelength.""" return self._process_generic_param( pval, self._internal_wave_unit, equivalencies=u.spectral())
python
def _process_wave_param(self, pval): """Process individual model parameter representing wavelength.""" return self._process_generic_param( pval, self._internal_wave_unit, equivalencies=u.spectral())
[ "def", "_process_wave_param", "(", "self", ",", "pval", ")", ":", "return", "self", ".", "_process_generic_param", "(", "pval", ",", "self", ".", "_internal_wave_unit", ",", "equivalencies", "=", "u", ".", "spectral", "(", ")", ")" ]
Process individual model parameter representing wavelength.
[ "Process", "individual", "model", "parameter", "representing", "wavelength", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L250-L253
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.waveset
def waveset(self): """Optimal wavelengths for sampling the spectrum or bandpass.""" w = get_waveset(self.model) if w is not None: utils.validate_wavelengths(w) w = w * self._internal_wave_unit return w
python
def waveset(self): """Optimal wavelengths for sampling the spectrum or bandpass.""" w = get_waveset(self.model) if w is not None: utils.validate_wavelengths(w) w = w * self._internal_wave_unit return w
[ "def", "waveset", "(", "self", ")", ":", "w", "=", "get_waveset", "(", "self", ".", "model", ")", "if", "w", "is", "not", "None", ":", "utils", ".", "validate_wavelengths", "(", "w", ")", "w", "=", "w", "*", "self", ".", "_internal_wave_unit", "return", "w" ]
Optimal wavelengths for sampling the spectrum or bandpass.
[ "Optimal", "wavelengths", "for", "sampling", "the", "spectrum", "or", "bandpass", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L309-L315
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.waverange
def waverange(self): """Range of `waveset`.""" if self.waveset is None: x = [None, None] else: x = u.Quantity([self.waveset.min(), self.waveset.max()]) return x
python
def waverange(self): """Range of `waveset`.""" if self.waveset is None: x = [None, None] else: x = u.Quantity([self.waveset.min(), self.waveset.max()]) return x
[ "def", "waverange", "(", "self", ")", ":", "if", "self", ".", "waveset", "is", "None", ":", "x", "=", "[", "None", ",", "None", "]", "else", ":", "x", "=", "u", ".", "Quantity", "(", "[", "self", ".", "waveset", ".", "min", "(", ")", ",", "self", ".", "waveset", ".", "max", "(", ")", "]", ")", "return", "x" ]
Range of `waveset`.
[ "Range", "of", "waveset", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L318-L324
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._validate_wavelengths
def _validate_wavelengths(self, wave): """Validate wavelengths for sampling.""" if wave is None: if self.waveset is None: raise exceptions.SynphotError( 'self.waveset is undefined; ' 'Provide wavelengths for sampling.') wavelengths = self.waveset else: w = self._process_wave_param(wave) utils.validate_wavelengths(w) wavelengths = w * self._internal_wave_unit return wavelengths
python
def _validate_wavelengths(self, wave): """Validate wavelengths for sampling.""" if wave is None: if self.waveset is None: raise exceptions.SynphotError( 'self.waveset is undefined; ' 'Provide wavelengths for sampling.') wavelengths = self.waveset else: w = self._process_wave_param(wave) utils.validate_wavelengths(w) wavelengths = w * self._internal_wave_unit return wavelengths
[ "def", "_validate_wavelengths", "(", "self", ",", "wave", ")", ":", "if", "wave", "is", "None", ":", "if", "self", ".", "waveset", "is", "None", ":", "raise", "exceptions", ".", "SynphotError", "(", "'self.waveset is undefined; '", "'Provide wavelengths for sampling.'", ")", "wavelengths", "=", "self", ".", "waveset", "else", ":", "w", "=", "self", ".", "_process_wave_param", "(", "wave", ")", "utils", ".", "validate_wavelengths", "(", "w", ")", "wavelengths", "=", "w", "*", "self", ".", "_internal_wave_unit", "return", "wavelengths" ]
Validate wavelengths for sampling.
[ "Validate", "wavelengths", "for", "sampling", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L330-L343
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._validate_other_mul_div
def _validate_other_mul_div(other): """Conditions for other to satisfy before mul/div.""" if not isinstance(other, (u.Quantity, numbers.Number, BaseUnitlessSpectrum, SourceSpectrum)): raise exceptions.IncompatibleSources( 'Can only operate on scalar number/Quantity or spectrum') elif (isinstance(other, u.Quantity) and (other.unit.decompose() != u.dimensionless_unscaled or not np.isscalar(other.value) or not isinstance(other.value, numbers.Real))): raise exceptions.IncompatibleSources( 'Can only operate on real scalar dimensionless Quantity') elif (isinstance(other, numbers.Number) and not (np.isscalar(other) and isinstance(other, numbers.Real))): raise exceptions.IncompatibleSources( 'Can only operate on real scalar number')
python
def _validate_other_mul_div(other): """Conditions for other to satisfy before mul/div.""" if not isinstance(other, (u.Quantity, numbers.Number, BaseUnitlessSpectrum, SourceSpectrum)): raise exceptions.IncompatibleSources( 'Can only operate on scalar number/Quantity or spectrum') elif (isinstance(other, u.Quantity) and (other.unit.decompose() != u.dimensionless_unscaled or not np.isscalar(other.value) or not isinstance(other.value, numbers.Real))): raise exceptions.IncompatibleSources( 'Can only operate on real scalar dimensionless Quantity') elif (isinstance(other, numbers.Number) and not (np.isscalar(other) and isinstance(other, numbers.Real))): raise exceptions.IncompatibleSources( 'Can only operate on real scalar number')
[ "def", "_validate_other_mul_div", "(", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "(", "u", ".", "Quantity", ",", "numbers", ".", "Number", ",", "BaseUnitlessSpectrum", ",", "SourceSpectrum", ")", ")", ":", "raise", "exceptions", ".", "IncompatibleSources", "(", "'Can only operate on scalar number/Quantity or spectrum'", ")", "elif", "(", "isinstance", "(", "other", ",", "u", ".", "Quantity", ")", "and", "(", "other", ".", "unit", ".", "decompose", "(", ")", "!=", "u", ".", "dimensionless_unscaled", "or", "not", "np", ".", "isscalar", "(", "other", ".", "value", ")", "or", "not", "isinstance", "(", "other", ".", "value", ",", "numbers", ".", "Real", ")", ")", ")", ":", "raise", "exceptions", ".", "IncompatibleSources", "(", "'Can only operate on real scalar dimensionless Quantity'", ")", "elif", "(", "isinstance", "(", "other", ",", "numbers", ".", "Number", ")", "and", "not", "(", "np", ".", "isscalar", "(", "other", ")", "and", "isinstance", "(", "other", ",", "numbers", ".", "Real", ")", ")", ")", ":", "raise", "exceptions", ".", "IncompatibleSources", "(", "'Can only operate on real scalar number'", ")" ]
Conditions for other to satisfy before mul/div.
[ "Conditions", "for", "other", "to", "satisfy", "before", "mul", "/", "div", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L375-L390
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.integrate
def integrate(self, wavelengths=None, **kwargs): """Perform integration. This uses any analytical integral that the underlying model has (i.e., ``self.model.integral``). If unavailable, it uses the default fall-back integrator set in the ``default_integrator`` configuration item. If wavelengths are provided, flux or throughput is first resampled. This is useful when user wants to integrate at specific end points or use custom spacing; In that case, user can pass in desired sampling array generated with :func:`numpy.linspace`, :func:`numpy.logspace`, etc. If not provided, then `waveset` is used. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. kwargs : dict Optional keywords to ``__call__`` for sampling. Returns ------- result : `~astropy.units.quantity.Quantity` Integrated result. Raises ------ NotImplementedError Invalid default integrator. synphot.exceptions.SynphotError `waveset` is needed but undefined or cannot integrate natively in the given ``flux_unit``. """ # Cannot integrate per Hz units natively across wavelength # without converting them to per Angstrom unit first, so # less misleading to just disallow that option for now. if 'flux_unit' in kwargs: self._validate_flux_unit(kwargs['flux_unit'], wav_only=True) x = self._validate_wavelengths(wavelengths) # TODO: When astropy.modeling.models supports this, need to # make sure that this actually works, and gives correct unit. # https://github.com/astropy/astropy/issues/5033 # https://github.com/astropy/astropy/pull/5108 try: m = self.model.integral except (AttributeError, NotImplementedError): if conf.default_integrator == 'trapezoid': y = self(x, **kwargs) result = abs(np.trapz(y.value, x=x.value)) result_unit = y.unit else: # pragma: no cover raise NotImplementedError( 'Analytic integral not available and default integrator ' '{0} is not supported'.format(conf.default_integrator)) else: # pragma: no cover start = x[0].value stop = x[-1].value result = (m(stop) - m(start)) result_unit = self._internal_flux_unit # Ensure final unit takes account of integration across wavelength if result_unit != units.THROUGHPUT: if result_unit == units.PHOTLAM: result_unit = u.photon / (u.cm**2 * u.s) elif result_unit == units.FLAM: result_unit = u.erg / (u.cm**2 * u.s) else: # pragma: no cover raise NotImplementedError( 'Integration of {0} is not supported'.format(result_unit)) else: # Ideally flux can use this too but unfortunately this # operation results in confusing output unit for flux. result_unit *= self._internal_wave_unit return result * result_unit
python
def integrate(self, wavelengths=None, **kwargs): """Perform integration. This uses any analytical integral that the underlying model has (i.e., ``self.model.integral``). If unavailable, it uses the default fall-back integrator set in the ``default_integrator`` configuration item. If wavelengths are provided, flux or throughput is first resampled. This is useful when user wants to integrate at specific end points or use custom spacing; In that case, user can pass in desired sampling array generated with :func:`numpy.linspace`, :func:`numpy.logspace`, etc. If not provided, then `waveset` is used. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. kwargs : dict Optional keywords to ``__call__`` for sampling. Returns ------- result : `~astropy.units.quantity.Quantity` Integrated result. Raises ------ NotImplementedError Invalid default integrator. synphot.exceptions.SynphotError `waveset` is needed but undefined or cannot integrate natively in the given ``flux_unit``. """ # Cannot integrate per Hz units natively across wavelength # without converting them to per Angstrom unit first, so # less misleading to just disallow that option for now. if 'flux_unit' in kwargs: self._validate_flux_unit(kwargs['flux_unit'], wav_only=True) x = self._validate_wavelengths(wavelengths) # TODO: When astropy.modeling.models supports this, need to # make sure that this actually works, and gives correct unit. # https://github.com/astropy/astropy/issues/5033 # https://github.com/astropy/astropy/pull/5108 try: m = self.model.integral except (AttributeError, NotImplementedError): if conf.default_integrator == 'trapezoid': y = self(x, **kwargs) result = abs(np.trapz(y.value, x=x.value)) result_unit = y.unit else: # pragma: no cover raise NotImplementedError( 'Analytic integral not available and default integrator ' '{0} is not supported'.format(conf.default_integrator)) else: # pragma: no cover start = x[0].value stop = x[-1].value result = (m(stop) - m(start)) result_unit = self._internal_flux_unit # Ensure final unit takes account of integration across wavelength if result_unit != units.THROUGHPUT: if result_unit == units.PHOTLAM: result_unit = u.photon / (u.cm**2 * u.s) elif result_unit == units.FLAM: result_unit = u.erg / (u.cm**2 * u.s) else: # pragma: no cover raise NotImplementedError( 'Integration of {0} is not supported'.format(result_unit)) else: # Ideally flux can use this too but unfortunately this # operation results in confusing output unit for flux. result_unit *= self._internal_wave_unit return result * result_unit
[ "def", "integrate", "(", "self", ",", "wavelengths", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Cannot integrate per Hz units natively across wavelength", "# without converting them to per Angstrom unit first, so", "# less misleading to just disallow that option for now.", "if", "'flux_unit'", "in", "kwargs", ":", "self", ".", "_validate_flux_unit", "(", "kwargs", "[", "'flux_unit'", "]", ",", "wav_only", "=", "True", ")", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", "# TODO: When astropy.modeling.models supports this, need to", "# make sure that this actually works, and gives correct unit.", "# https://github.com/astropy/astropy/issues/5033", "# https://github.com/astropy/astropy/pull/5108", "try", ":", "m", "=", "self", ".", "model", ".", "integral", "except", "(", "AttributeError", ",", "NotImplementedError", ")", ":", "if", "conf", ".", "default_integrator", "==", "'trapezoid'", ":", "y", "=", "self", "(", "x", ",", "*", "*", "kwargs", ")", "result", "=", "abs", "(", "np", ".", "trapz", "(", "y", ".", "value", ",", "x", "=", "x", ".", "value", ")", ")", "result_unit", "=", "y", ".", "unit", "else", ":", "# pragma: no cover", "raise", "NotImplementedError", "(", "'Analytic integral not available and default integrator '", "'{0} is not supported'", ".", "format", "(", "conf", ".", "default_integrator", ")", ")", "else", ":", "# pragma: no cover", "start", "=", "x", "[", "0", "]", ".", "value", "stop", "=", "x", "[", "-", "1", "]", ".", "value", "result", "=", "(", "m", "(", "stop", ")", "-", "m", "(", "start", ")", ")", "result_unit", "=", "self", ".", "_internal_flux_unit", "# Ensure final unit takes account of integration across wavelength", "if", "result_unit", "!=", "units", ".", "THROUGHPUT", ":", "if", "result_unit", "==", "units", ".", "PHOTLAM", ":", "result_unit", "=", "u", ".", "photon", "/", "(", "u", ".", "cm", "**", "2", "*", "u", ".", "s", ")", "elif", "result_unit", "==", "units", ".", "FLAM", ":", "result_unit", "=", "u", ".", "erg", "/", "(", "u", ".", "cm", "**", "2", "*", "u", ".", "s", ")", "else", ":", "# pragma: no cover", "raise", "NotImplementedError", "(", "'Integration of {0} is not supported'", ".", "format", "(", "result_unit", ")", ")", "else", ":", "# Ideally flux can use this too but unfortunately this", "# operation results in confusing output unit for flux.", "result_unit", "*=", "self", ".", "_internal_wave_unit", "return", "result", "*", "result_unit" ]
Perform integration. This uses any analytical integral that the underlying model has (i.e., ``self.model.integral``). If unavailable, it uses the default fall-back integrator set in the ``default_integrator`` configuration item. If wavelengths are provided, flux or throughput is first resampled. This is useful when user wants to integrate at specific end points or use custom spacing; In that case, user can pass in desired sampling array generated with :func:`numpy.linspace`, :func:`numpy.logspace`, etc. If not provided, then `waveset` is used. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. kwargs : dict Optional keywords to ``__call__`` for sampling. Returns ------- result : `~astropy.units.quantity.Quantity` Integrated result. Raises ------ NotImplementedError Invalid default integrator. synphot.exceptions.SynphotError `waveset` is needed but undefined or cannot integrate natively in the given ``flux_unit``.
[ "Perform", "integration", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L410-L493
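The trapezoid fall-back path, restated with plain numpy so the numbers are easy to follow (a toy flat spectrum, not a real synphot model):

    import numpy as np

    wave = np.linspace(1000.0, 2000.0, 1001)   # Angstrom
    flux = np.full_like(wave, 0.5)             # pretend sampling returned 0.5 PHOTLAM everywhere
    print(abs(np.trapz(flux, x=wave)))         # 500.0 -> reported as photon / (cm2 s)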
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.avgwave
def avgwave(self, wavelengths=None): """Calculate the :ref:`average wavelength <synphot-formula-avgwv>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- avg_wave : `~astropy.units.quantity.Quantity` Average wavelength. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value num = np.trapz(y * x, x=x) den = np.trapz(y, x=x) if den == 0: # pragma: no cover avg_wave = 0.0 else: avg_wave = abs(num / den) return avg_wave * self._internal_wave_unit
python
def avgwave(self, wavelengths=None): """Calculate the :ref:`average wavelength <synphot-formula-avgwv>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- avg_wave : `~astropy.units.quantity.Quantity` Average wavelength. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value num = np.trapz(y * x, x=x) den = np.trapz(y, x=x) if den == 0: # pragma: no cover avg_wave = 0.0 else: avg_wave = abs(num / den) return avg_wave * self._internal_wave_unit
[ "def", "avgwave", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", "(", "x", ")", ".", "value", "num", "=", "np", ".", "trapz", "(", "y", "*", "x", ",", "x", "=", "x", ")", "den", "=", "np", ".", "trapz", "(", "y", ",", "x", "=", "x", ")", "if", "den", "==", "0", ":", "# pragma: no cover", "avg_wave", "=", "0.0", "else", ":", "avg_wave", "=", "abs", "(", "num", "/", "den", ")", "return", "avg_wave", "*", "self", ".", "_internal_wave_unit" ]
Calculate the :ref:`average wavelength <synphot-formula-avgwv>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- avg_wave : `~astropy.units.quantity.Quantity` Average wavelength.
[ "Calculate", "the", ":", "ref", ":", "average", "wavelength", "<synphot", "-", "formula", "-", "avgwv", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L495-L521
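The sampled implementation above (``num = np.trapz(y * x)``, ``den = np.trapz(y)``) corresponds to the flux-weighted mean wavelength,

\[ \lambda_{\mathrm{avg}} = \frac{\int f(\lambda)\,\lambda\,d\lambda}{\int f(\lambda)\,d\lambda}, \]

returned in the internal wavelength unit (Angstrom), with 0 returned when the denominator vanishes.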
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.barlam
def barlam(self, wavelengths=None): """Calculate :ref:`mean log wavelength <synphot-formula-barlam>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- bar_lam : `~astropy.units.quantity.Quantity` Mean log wavelength. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value num = np.trapz(y * np.log(x) / x, x=x) den = np.trapz(y / x, x=x) if num == 0 or den == 0: # pragma: no cover bar_lam = 0.0 else: bar_lam = np.exp(abs(num / den)) return bar_lam * self._internal_wave_unit
python
def barlam(self, wavelengths=None): """Calculate :ref:`mean log wavelength <synphot-formula-barlam>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- bar_lam : `~astropy.units.quantity.Quantity` Mean log wavelength. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value num = np.trapz(y * np.log(x) / x, x=x) den = np.trapz(y / x, x=x) if num == 0 or den == 0: # pragma: no cover bar_lam = 0.0 else: bar_lam = np.exp(abs(num / den)) return bar_lam * self._internal_wave_unit
[ "def", "barlam", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", "(", "x", ")", ".", "value", "num", "=", "np", ".", "trapz", "(", "y", "*", "np", ".", "log", "(", "x", ")", "/", "x", ",", "x", "=", "x", ")", "den", "=", "np", ".", "trapz", "(", "y", "/", "x", ",", "x", "=", "x", ")", "if", "num", "==", "0", "or", "den", "==", "0", ":", "# pragma: no cover", "bar_lam", "=", "0.0", "else", ":", "bar_lam", "=", "np", ".", "exp", "(", "abs", "(", "num", "/", "den", ")", ")", "return", "bar_lam", "*", "self", ".", "_internal_wave_unit" ]
Calculate :ref:`mean log wavelength <synphot-formula-barlam>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- bar_lam : `~astropy.units.quantity.Quantity` Mean log wavelength.
[ "Calculate", ":", "ref", ":", "mean", "log", "wavelength", "<synphot", "-", "formula", "-", "barlam", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L523-L549
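The code above (``num = np.trapz(y * np.log(x) / x)``, ``den = np.trapz(y / x)``) evaluates the mean log wavelength as

\[ \bar{\lambda} = \exp\!\left(\frac{\int f(\lambda)\,\ln\lambda\;d\lambda/\lambda}{\int f(\lambda)\;d\lambda/\lambda}\right), \]

again in Angstrom, with 0 returned when either integral vanishes.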
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.pivot
def pivot(self, wavelengths=None): """Calculate :ref:`pivot wavelength <synphot-formula-pivwv>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- pivwv : `~astropy.units.quantity.Quantity` Pivot wavelength. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value num = np.trapz(y * x, x=x) den = np.trapz(y / x, x=x) if den == 0: # pragma: no cover pivwv = 0.0 else: pivwv = np.sqrt(abs(num / den)) return pivwv * self._internal_wave_unit
python
def pivot(self, wavelengths=None): """Calculate :ref:`pivot wavelength <synphot-formula-pivwv>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- pivwv : `~astropy.units.quantity.Quantity` Pivot wavelength. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value num = np.trapz(y * x, x=x) den = np.trapz(y / x, x=x) if den == 0: # pragma: no cover pivwv = 0.0 else: pivwv = np.sqrt(abs(num / den)) return pivwv * self._internal_wave_unit
[ "def", "pivot", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", "(", "x", ")", ".", "value", "num", "=", "np", ".", "trapz", "(", "y", "*", "x", ",", "x", "=", "x", ")", "den", "=", "np", ".", "trapz", "(", "y", "/", "x", ",", "x", "=", "x", ")", "if", "den", "==", "0", ":", "# pragma: no cover", "pivwv", "=", "0.0", "else", ":", "pivwv", "=", "np", ".", "sqrt", "(", "abs", "(", "num", "/", "den", ")", ")", "return", "pivwv", "*", "self", ".", "_internal_wave_unit" ]
Calculate :ref:`pivot wavelength <synphot-formula-pivwv>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- pivwv : `~astropy.units.quantity.Quantity` Pivot wavelength.
[ "Calculate", ":", "ref", ":", "pivot", "wavelength", "<synphot", "-", "formula", "-", "pivwv", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L551-L577
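The pivot wavelength computed above (``num = np.trapz(y * x)``, ``den = np.trapz(y / x)``) is

\[ \lambda_{\mathrm{piv}} = \sqrt{\frac{\int f(\lambda)\,\lambda\,d\lambda}{\int f(\lambda)\,d\lambda/\lambda}}, \]

the wavelength conventionally used to convert a band-averaged F_λ into F_ν (via a factor of λ_piv²/c).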
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.force_extrapolation
def force_extrapolation(self): """Force the underlying model to extrapolate. An example where this is useful: You create a source spectrum with non-default extrapolation behavior and you wish to force the underlying empirical model to extrapolate based on nearest point. .. note:: This is only applicable to `~synphot.models.Empirical1D` model and should still work even if the source spectrum has been redshifted. Returns ------- is_forced : bool `True` if the model is successfully forced to be extrapolated, else `False`. """ # We use _model here in case the spectrum is redshifted. if isinstance(self._model, Empirical1D): self._model.fill_value = np.nan is_forced = True else: is_forced = False return is_forced
python
def force_extrapolation(self): """Force the underlying model to extrapolate. An example where this is useful: You create a source spectrum with non-default extrapolation behavior and you wish to force the underlying empirical model to extrapolate based on nearest point. .. note:: This is only applicable to `~synphot.models.Empirical1D` model and should still work even if the source spectrum has been redshifted. Returns ------- is_forced : bool `True` if the model is successfully forced to be extrapolated, else `False`. """ # We use _model here in case the spectrum is redshifted. if isinstance(self._model, Empirical1D): self._model.fill_value = np.nan is_forced = True else: is_forced = False return is_forced
[ "def", "force_extrapolation", "(", "self", ")", ":", "# We use _model here in case the spectrum is redshifted.", "if", "isinstance", "(", "self", ".", "_model", ",", "Empirical1D", ")", ":", "self", ".", "_model", ".", "fill_value", "=", "np", ".", "nan", "is_forced", "=", "True", "else", ":", "is_forced", "=", "False", "return", "is_forced" ]
Force the underlying model to extrapolate. An example where this is useful: You create a source spectrum with non-default extrapolation behavior and you wish to force the underlying empirical model to extrapolate based on nearest point. .. note:: This is only applicable to `~synphot.models.Empirical1D` model and should still work even if the source spectrum has been redshifted. Returns ------- is_forced : bool `True` if the model is successfully forced to be extrapolated, else `False`.
[ "Force", "the", "underlying", "model", "to", "extrapolate", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L579-L606
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.taper
def taper(self, wavelengths=None): """Taper the spectrum or bandpass. The wavelengths to use for the first and last points are calculated by using the same ratio as for the 2 interior points. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for tapering. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- sp : `BaseSpectrum` Tapered empirical spectrum or bandpass. ``self`` is returned if already tapered (e.g., box model). """ x = self._validate_wavelengths(wavelengths) # Calculate new end points for tapering w1 = x[0] ** 2 / x[1] w2 = x[-1] ** 2 / x[-2] # Special handling for empirical data. # This is to be compatible with ASTROLIB PYSYNPHOT behavior. if isinstance(self._model, Empirical1D): y1 = self._model.lookup_table[0] y2 = self._model.lookup_table[-1] # Other models can just evaluate at new end points else: y1 = self(w1) y2 = self(w2) # Nothing to do if y1 == 0 and y2 == 0: return self # Do we need a deepcopy here? y = self(x) if y1 != 0: x = np.insert(x, 0, w1) y = np.insert(y, 0, 0.0) if y2 != 0: x = np.insert(x, x.size, w2) y = np.insert(y, y.size, 0.0) return self.__class__(Empirical1D, points=x, lookup_table=y)
python
def taper(self, wavelengths=None): """Taper the spectrum or bandpass. The wavelengths to use for the first and last points are calculated by using the same ratio as for the 2 interior points. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for tapering. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- sp : `BaseSpectrum` Tapered empirical spectrum or bandpass. ``self`` is returned if already tapered (e.g., box model). """ x = self._validate_wavelengths(wavelengths) # Calculate new end points for tapering w1 = x[0] ** 2 / x[1] w2 = x[-1] ** 2 / x[-2] # Special handling for empirical data. # This is to be compatible with ASTROLIB PYSYNPHOT behavior. if isinstance(self._model, Empirical1D): y1 = self._model.lookup_table[0] y2 = self._model.lookup_table[-1] # Other models can just evaluate at new end points else: y1 = self(w1) y2 = self(w2) # Nothing to do if y1 == 0 and y2 == 0: return self # Do we need a deepcopy here? y = self(x) if y1 != 0: x = np.insert(x, 0, w1) y = np.insert(y, 0, 0.0) if y2 != 0: x = np.insert(x, x.size, w2) y = np.insert(y, y.size, 0.0) return self.__class__(Empirical1D, points=x, lookup_table=y)
[ "def", "taper", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", "# Calculate new end points for tapering", "w1", "=", "x", "[", "0", "]", "**", "2", "/", "x", "[", "1", "]", "w2", "=", "x", "[", "-", "1", "]", "**", "2", "/", "x", "[", "-", "2", "]", "# Special handling for empirical data.", "# This is to be compatible with ASTROLIB PYSYNPHOT behavior.", "if", "isinstance", "(", "self", ".", "_model", ",", "Empirical1D", ")", ":", "y1", "=", "self", ".", "_model", ".", "lookup_table", "[", "0", "]", "y2", "=", "self", ".", "_model", ".", "lookup_table", "[", "-", "1", "]", "# Other models can just evaluate at new end points", "else", ":", "y1", "=", "self", "(", "w1", ")", "y2", "=", "self", "(", "w2", ")", "# Nothing to do", "if", "y1", "==", "0", "and", "y2", "==", "0", ":", "return", "self", "# Do we need a deepcopy here?", "y", "=", "self", "(", "x", ")", "if", "y1", "!=", "0", ":", "x", "=", "np", ".", "insert", "(", "x", ",", "0", ",", "w1", ")", "y", "=", "np", ".", "insert", "(", "y", ",", "0", ",", "0.0", ")", "if", "y2", "!=", "0", ":", "x", "=", "np", ".", "insert", "(", "x", ",", "x", ".", "size", ",", "w2", ")", "y", "=", "np", ".", "insert", "(", "y", ",", "y", ".", "size", ",", "0.0", ")", "return", "self", ".", "__class__", "(", "Empirical1D", ",", "points", "=", "x", ",", "lookup_table", "=", "y", ")" ]
Taper the spectrum or bandpass. The wavelengths to use for the first and last points are calculated by using the same ratio as for the 2 interior points. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for tapering. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. Returns ------- sp : `BaseSpectrum` Tapered empirical spectrum or bandpass. ``self`` is returned if already tapered (e.g., box model).
[ "Taper", "the", "spectrum", "or", "bandpass", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L608-L657
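The new end points computed above extend the grid by the same ratio as the outermost sample spacing,

\[ w_1 = \frac{\lambda_0^2}{\lambda_1}, \qquad w_2 = \frac{\lambda_{N-1}^2}{\lambda_{N-2}}, \qquad\text{so that}\qquad \frac{\lambda_0}{w_1} = \frac{\lambda_1}{\lambda_0}, \quad \frac{w_2}{\lambda_{N-1}} = \frac{\lambda_{N-1}}{\lambda_{N-2}}, \]

and the sampled values are forced to zero at these new points only on the side(s) where the original end value is non-zero.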
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._get_arrays
def _get_arrays(self, wavelengths, **kwargs): """Get sampled spectrum or bandpass in user units.""" x = self._validate_wavelengths(wavelengths) y = self(x, **kwargs) if isinstance(wavelengths, u.Quantity): w = x.to(wavelengths.unit, u.spectral()) else: w = x return w, y
python
def _get_arrays(self, wavelengths, **kwargs): """Get sampled spectrum or bandpass in user units.""" x = self._validate_wavelengths(wavelengths) y = self(x, **kwargs) if isinstance(wavelengths, u.Quantity): w = x.to(wavelengths.unit, u.spectral()) else: w = x return w, y
[ "def", "_get_arrays", "(", "self", ",", "wavelengths", ",", "*", "*", "kwargs", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", "y", "=", "self", "(", "x", ",", "*", "*", "kwargs", ")", "if", "isinstance", "(", "wavelengths", ",", "u", ".", "Quantity", ")", ":", "w", "=", "x", ".", "to", "(", "wavelengths", ".", "unit", ",", "u", ".", "spectral", "(", ")", ")", "else", ":", "w", "=", "x", "return", "w", ",", "y" ]
Get sampled spectrum or bandpass in user units.
[ "Get", "sampled", "spectrum", "or", "bandpass", "in", "user", "units", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L659-L669
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum._do_plot
def _do_plot(x, y, title='', xlog=False, ylog=False, left=None, right=None, bottom=None, top=None, save_as=''): # pragma: no cover """Plot worker. Parameters ---------- x, y : `~astropy.units.quantity.Quantity` Wavelength and flux/throughput to plot. kwargs See :func:`plot`. """ try: import matplotlib.pyplot as plt except ImportError: log.error('No matplotlib installation found; plotting disabled ' 'as a result.') return fig, ax = plt.subplots() ax.plot(x, y) # Custom wavelength limits if left is not None: ax.set_xlim(left=left) if right is not None: ax.set_xlim(right=right) # Custom flux/throughput limit if bottom is not None: ax.set_ylim(bottom=bottom) if top is not None: ax.set_ylim(top=top) xu = x.unit if xu.physical_type == 'frequency': ax.set_xlabel('Frequency ({0})'.format(xu)) else: ax.set_xlabel('Wavelength ({0})'.format(xu)) yu = y.unit if yu is u.dimensionless_unscaled: ax.set_ylabel('Unitless') else: ax.set_ylabel('Flux ({0})'.format(yu)) if title: ax.set_title(title) if xlog: ax.set_xscale('log') if ylog: ax.set_yscale('log') plt.draw() if save_as: plt.savefig(save_as) log.info('Plot saved as {0}'.format(save_as))
python
def _do_plot(x, y, title='', xlog=False, ylog=False, left=None, right=None, bottom=None, top=None, save_as=''): # pragma: no cover """Plot worker. Parameters ---------- x, y : `~astropy.units.quantity.Quantity` Wavelength and flux/throughput to plot. kwargs See :func:`plot`. """ try: import matplotlib.pyplot as plt except ImportError: log.error('No matplotlib installation found; plotting disabled ' 'as a result.') return fig, ax = plt.subplots() ax.plot(x, y) # Custom wavelength limits if left is not None: ax.set_xlim(left=left) if right is not None: ax.set_xlim(right=right) # Custom flux/throughput limit if bottom is not None: ax.set_ylim(bottom=bottom) if top is not None: ax.set_ylim(top=top) xu = x.unit if xu.physical_type == 'frequency': ax.set_xlabel('Frequency ({0})'.format(xu)) else: ax.set_xlabel('Wavelength ({0})'.format(xu)) yu = y.unit if yu is u.dimensionless_unscaled: ax.set_ylabel('Unitless') else: ax.set_ylabel('Flux ({0})'.format(yu)) if title: ax.set_title(title) if xlog: ax.set_xscale('log') if ylog: ax.set_yscale('log') plt.draw() if save_as: plt.savefig(save_as) log.info('Plot saved as {0}'.format(save_as))
[ "def", "_do_plot", "(", "x", ",", "y", ",", "title", "=", "''", ",", "xlog", "=", "False", ",", "ylog", "=", "False", ",", "left", "=", "None", ",", "right", "=", "None", ",", "bottom", "=", "None", ",", "top", "=", "None", ",", "save_as", "=", "''", ")", ":", "# pragma: no cover", "try", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "except", "ImportError", ":", "log", ".", "error", "(", "'No matplotlib installation found; plotting disabled '", "'as a result.'", ")", "return", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "ax", ".", "plot", "(", "x", ",", "y", ")", "# Custom wavelength limits", "if", "left", "is", "not", "None", ":", "ax", ".", "set_xlim", "(", "left", "=", "left", ")", "if", "right", "is", "not", "None", ":", "ax", ".", "set_xlim", "(", "right", "=", "right", ")", "# Custom flux/throughput limit", "if", "bottom", "is", "not", "None", ":", "ax", ".", "set_ylim", "(", "bottom", "=", "bottom", ")", "if", "top", "is", "not", "None", ":", "ax", ".", "set_ylim", "(", "top", "=", "top", ")", "xu", "=", "x", ".", "unit", "if", "xu", ".", "physical_type", "==", "'frequency'", ":", "ax", ".", "set_xlabel", "(", "'Frequency ({0})'", ".", "format", "(", "xu", ")", ")", "else", ":", "ax", ".", "set_xlabel", "(", "'Wavelength ({0})'", ".", "format", "(", "xu", ")", ")", "yu", "=", "y", ".", "unit", "if", "yu", "is", "u", ".", "dimensionless_unscaled", ":", "ax", ".", "set_ylabel", "(", "'Unitless'", ")", "else", ":", "ax", ".", "set_ylabel", "(", "'Flux ({0})'", ".", "format", "(", "yu", ")", ")", "if", "title", ":", "ax", ".", "set_title", "(", "title", ")", "if", "xlog", ":", "ax", ".", "set_xscale", "(", "'log'", ")", "if", "ylog", ":", "ax", ".", "set_yscale", "(", "'log'", ")", "plt", ".", "draw", "(", ")", "if", "save_as", ":", "plt", ".", "savefig", "(", "save_as", ")", "log", ".", "info", "(", "'Plot saved as {0}'", ".", "format", "(", "save_as", ")", ")" ]
Plot worker. Parameters ---------- x, y : `~astropy.units.quantity.Quantity` Wavelength and flux/throughput to plot. kwargs See :func:`plot`.
[ "Plot", "worker", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L672-L732
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSpectrum.plot
def plot(self, wavelengths=None, **kwargs): # pragma: no cover """Plot the spectrum. .. note:: Uses ``matplotlib``. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. title : str Plot title. xlog, ylog : bool Plot X and Y axes, respectively, in log scale. Default is linear scale. left, right : `None` or number Minimum and maximum wavelengths to plot. If `None`, uses the whole range. If a number is given, must be in Angstrom. bottom, top : `None` or number Minimum and maximum flux/throughput to plot. If `None`, uses the whole range. If a number is given, must be in internal unit. save_as : str Save the plot to an image file. The file type is automatically determined by given file extension. Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ w, y = self._get_arrays(wavelengths) self._do_plot(w, y, **kwargs)
python
def plot(self, wavelengths=None, **kwargs): # pragma: no cover """Plot the spectrum. .. note:: Uses ``matplotlib``. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. title : str Plot title. xlog, ylog : bool Plot X and Y axes, respectively, in log scale. Default is linear scale. left, right : `None` or number Minimum and maximum wavelengths to plot. If `None`, uses the whole range. If a number is given, must be in Angstrom. bottom, top : `None` or number Minimum and maximum flux/throughput to plot. If `None`, uses the whole range. If a number is given, must be in internal unit. save_as : str Save the plot to an image file. The file type is automatically determined by given file extension. Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ w, y = self._get_arrays(wavelengths) self._do_plot(w, y, **kwargs)
[ "def", "plot", "(", "self", ",", "wavelengths", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# pragma: no cover", "w", ",", "y", "=", "self", ".", "_get_arrays", "(", "wavelengths", ")", "self", ".", "_do_plot", "(", "w", ",", "y", ",", "*", "*", "kwargs", ")" ]
Plot the spectrum. .. note:: Uses ``matplotlib``. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. title : str Plot title. xlog, ylog : bool Plot X and Y axes, respectively, in log scale. Default is linear scale. left, right : `None` or number Minimum and maximum wavelengths to plot. If `None`, uses the whole range. If a number is given, must be in Angstrom. bottom, top : `None` or number Minimum and maximum flux/throughput to plot. If `None`, uses the whole range. If a number is given, must be in internal unit. save_as : str Save the plot to an image file. The file type is automatically determined by given file extension. Raises ------ synphot.exceptions.SynphotError Invalid inputs.
[ "Plot", "the", "spectrum", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L734-L774
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSourceSpectrum._validate_flux_unit
def _validate_flux_unit(new_unit, wav_only=False): """Make sure flux unit is valid.""" new_unit = units.validate_unit(new_unit) acceptable_types = ['spectral flux density wav', 'photon flux density wav'] acceptable_names = ['PHOTLAM', 'FLAM'] if not wav_only: # Include per Hz units acceptable_types += ['spectral flux density', 'photon flux density'] acceptable_names += ['PHOTNU', 'FNU', 'Jy'] if new_unit.physical_type not in acceptable_types: raise exceptions.SynphotError( 'Source spectrum cannot operate in {0}. Acceptable units: ' '{1}'.format(new_unit, ','.join(acceptable_names))) return new_unit
python
def _validate_flux_unit(new_unit, wav_only=False): """Make sure flux unit is valid.""" new_unit = units.validate_unit(new_unit) acceptable_types = ['spectral flux density wav', 'photon flux density wav'] acceptable_names = ['PHOTLAM', 'FLAM'] if not wav_only: # Include per Hz units acceptable_types += ['spectral flux density', 'photon flux density'] acceptable_names += ['PHOTNU', 'FNU', 'Jy'] if new_unit.physical_type not in acceptable_types: raise exceptions.SynphotError( 'Source spectrum cannot operate in {0}. Acceptable units: ' '{1}'.format(new_unit, ','.join(acceptable_names))) return new_unit
[ "def", "_validate_flux_unit", "(", "new_unit", ",", "wav_only", "=", "False", ")", ":", "new_unit", "=", "units", ".", "validate_unit", "(", "new_unit", ")", "acceptable_types", "=", "[", "'spectral flux density wav'", ",", "'photon flux density wav'", "]", "acceptable_names", "=", "[", "'PHOTLAM'", ",", "'FLAM'", "]", "if", "not", "wav_only", ":", "# Include per Hz units", "acceptable_types", "+=", "[", "'spectral flux density'", ",", "'photon flux density'", "]", "acceptable_names", "+=", "[", "'PHOTNU'", ",", "'FNU'", ",", "'Jy'", "]", "if", "new_unit", ".", "physical_type", "not", "in", "acceptable_types", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Source spectrum cannot operate in {0}. Acceptable units: '", "'{1}'", ".", "format", "(", "new_unit", ",", "','", ".", "join", "(", "acceptable_names", ")", ")", ")", "return", "new_unit" ]
Make sure flux unit is valid.
[ "Make", "sure", "flux", "unit", "is", "valid", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L785-L802
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseSourceSpectrum.normalize
def normalize(self, renorm_val, band=None, wavelengths=None, force=False, area=None, vegaspec=None): """Renormalize the spectrum to the given Quantity and band. .. warning:: Redshift attribute (``z``) is reset to 0 in the normalized spectrum even if ``self.z`` is non-zero. This is because the normalization simply adds a scale factor to the existing composite model. This is confusing but should not affect the flux sampling. Parameters ---------- renorm_val : number or `~astropy.units.quantity.Quantity` Value to renormalize the spectrum to. If not a Quantity, assumed to be in internal unit. band : `SpectralElement` Bandpass to use in renormalization. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for renormalization. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. force : bool By default (`False`), renormalization is only done when band wavelength limits are within ``self`` or at least 99% of the flux is within the overlap. Set to `True` to force renormalization for partial overlap (this changes the underlying model of ``self`` to always extrapolate, if applicable). Disjoint bandpass raises an exception regardless. area, vegaspec See :func:`~synphot.units.convert_flux`. Returns ------- sp : obj Renormalized spectrum. Raises ------ synphot.exceptions.DisjointError Renormalization band does not overlap with ``self``. synphot.exceptions.PartialOverlap Renormalization band only partially overlaps with ``self`` and significant amount of flux falls outside the overlap. synphot.exceptions.SynphotError Invalid inputs or calculation failed. """ warndict = {} if band is None: sp = self else: if not isinstance(band, SpectralElement): raise exceptions.SynphotError('Invalid bandpass.') stat = band.check_overlap(self, wavelengths=wavelengths) if stat == 'none': raise exceptions.DisjointError( 'Spectrum and renormalization band are disjoint.') elif 'partial' in stat: if stat == 'partial_most': warn_str = 'At least' elif stat == 'partial_notmost' and force: warn_str = 'Less than' else: raise exceptions.PartialOverlap( 'Spectrum and renormalization band do not fully ' 'overlap. You may use force=True to force the ' 'renormalization to proceed.') warn_str = ( 'Spectrum is not defined everywhere in renormalization ' 'bandpass. {0} 99% of the band throughput has ' 'data. Spectrum will be').format(warn_str) if self.force_extrapolation(): warn_str = ('{0} extrapolated at constant ' 'value.').format(warn_str) else: warn_str = ('{0} evaluated outside pre-defined ' 'waveset.').format(warn_str) warnings.warn(warn_str, AstropyUserWarning) warndict['PartialRenorm'] = warn_str elif stat != 'full': # pragma: no cover raise exceptions.SynphotError( 'Overlap result of {0} is unexpected.'.format(stat)) sp = self.__mul__(band) if not isinstance(renorm_val, u.Quantity): renorm_val = renorm_val * self._internal_flux_unit renorm_unit_name = renorm_val.unit.to_string() w = sp._validate_wavelengths(wavelengths) if (renorm_val.unit == u.count or renorm_unit_name == units.OBMAG.to_string()): # Special handling for non-density units flux_tmp = sp(w, flux_unit=u.count, area=area) totalflux = flux_tmp.sum().value stdflux = 1.0 else: totalflux = sp.integrate(wavelengths=wavelengths).value # VEGAMAG if renorm_unit_name == units.VEGAMAG.to_string(): if not isinstance(vegaspec, SourceSpectrum): raise exceptions.SynphotError( 'Vega spectrum is missing.') stdspec = vegaspec # Magnitude flux-density units elif renorm_val.unit in (u.STmag, u.ABmag): stdspec = SourceSpectrum( ConstFlux1D, amplitude=(0 * renorm_val.unit)) # Linear flux-density units else: stdspec = SourceSpectrum( ConstFlux1D, amplitude=(1 * renorm_val.unit)) if band is None: # TODO: Cannot get this to agree with results # from using a very large box bandpass. # stdflux = stdspec.integrate(wavelengths=w).value raise NotImplementedError('Must provide a bandpass') else: up = stdspec * band stdflux = up.integrate(wavelengths=wavelengths).value utils.validate_totalflux(totalflux) # Renormalize in magnitudes if (renorm_val.unit.decompose() == u.mag or isinstance(renorm_val.unit, u.LogUnit)): const = renorm_val.value + (2.5 * np.log10(totalflux / stdflux)) newsp = self.__mul__(10**(-0.4 * const)) # Renormalize in linear flux units else: const = renorm_val.value * (stdflux / totalflux) newsp = self.__mul__(const) newsp.warnings = warndict return newsp
python
def normalize(self, renorm_val, band=None, wavelengths=None, force=False, area=None, vegaspec=None): """Renormalize the spectrum to the given Quantity and band. .. warning:: Redshift attribute (``z``) is reset to 0 in the normalized spectrum even if ``self.z`` is non-zero. This is because the normalization simply adds a scale factor to the existing composite model. This is confusing but should not affect the flux sampling. Parameters ---------- renorm_val : number or `~astropy.units.quantity.Quantity` Value to renormalize the spectrum to. If not a Quantity, assumed to be in internal unit. band : `SpectralElement` Bandpass to use in renormalization. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for renormalization. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. force : bool By default (`False`), renormalization is only done when band wavelength limits are within ``self`` or at least 99% of the flux is within the overlap. Set to `True` to force renormalization for partial overlap (this changes the underlying model of ``self`` to always extrapolate, if applicable). Disjoint bandpass raises an exception regardless. area, vegaspec See :func:`~synphot.units.convert_flux`. Returns ------- sp : obj Renormalized spectrum. Raises ------ synphot.exceptions.DisjointError Renormalization band does not overlap with ``self``. synphot.exceptions.PartialOverlap Renormalization band only partially overlaps with ``self`` and significant amount of flux falls outside the overlap. synphot.exceptions.SynphotError Invalid inputs or calculation failed. """ warndict = {} if band is None: sp = self else: if not isinstance(band, SpectralElement): raise exceptions.SynphotError('Invalid bandpass.') stat = band.check_overlap(self, wavelengths=wavelengths) if stat == 'none': raise exceptions.DisjointError( 'Spectrum and renormalization band are disjoint.') elif 'partial' in stat: if stat == 'partial_most': warn_str = 'At least' elif stat == 'partial_notmost' and force: warn_str = 'Less than' else: raise exceptions.PartialOverlap( 'Spectrum and renormalization band do not fully ' 'overlap. You may use force=True to force the ' 'renormalization to proceed.') warn_str = ( 'Spectrum is not defined everywhere in renormalization ' 'bandpass. {0} 99% of the band throughput has ' 'data. Spectrum will be').format(warn_str) if self.force_extrapolation(): warn_str = ('{0} extrapolated at constant ' 'value.').format(warn_str) else: warn_str = ('{0} evaluated outside pre-defined ' 'waveset.').format(warn_str) warnings.warn(warn_str, AstropyUserWarning) warndict['PartialRenorm'] = warn_str elif stat != 'full': # pragma: no cover raise exceptions.SynphotError( 'Overlap result of {0} is unexpected.'.format(stat)) sp = self.__mul__(band) if not isinstance(renorm_val, u.Quantity): renorm_val = renorm_val * self._internal_flux_unit renorm_unit_name = renorm_val.unit.to_string() w = sp._validate_wavelengths(wavelengths) if (renorm_val.unit == u.count or renorm_unit_name == units.OBMAG.to_string()): # Special handling for non-density units flux_tmp = sp(w, flux_unit=u.count, area=area) totalflux = flux_tmp.sum().value stdflux = 1.0 else: totalflux = sp.integrate(wavelengths=wavelengths).value # VEGAMAG if renorm_unit_name == units.VEGAMAG.to_string(): if not isinstance(vegaspec, SourceSpectrum): raise exceptions.SynphotError( 'Vega spectrum is missing.') stdspec = vegaspec # Magnitude flux-density units elif renorm_val.unit in (u.STmag, u.ABmag): stdspec = SourceSpectrum( ConstFlux1D, amplitude=(0 * renorm_val.unit)) # Linear flux-density units else: stdspec = SourceSpectrum( ConstFlux1D, amplitude=(1 * renorm_val.unit)) if band is None: # TODO: Cannot get this to agree with results # from using a very large box bandpass. # stdflux = stdspec.integrate(wavelengths=w).value raise NotImplementedError('Must provide a bandpass') else: up = stdspec * band stdflux = up.integrate(wavelengths=wavelengths).value utils.validate_totalflux(totalflux) # Renormalize in magnitudes if (renorm_val.unit.decompose() == u.mag or isinstance(renorm_val.unit, u.LogUnit)): const = renorm_val.value + (2.5 * np.log10(totalflux / stdflux)) newsp = self.__mul__(10**(-0.4 * const)) # Renormalize in linear flux units else: const = renorm_val.value * (stdflux / totalflux) newsp = self.__mul__(const) newsp.warnings = warndict return newsp
[ "def", "normalize", "(", "self", ",", "renorm_val", ",", "band", "=", "None", ",", "wavelengths", "=", "None", ",", "force", "=", "False", ",", "area", "=", "None", ",", "vegaspec", "=", "None", ")", ":", "warndict", "=", "{", "}", "if", "band", "is", "None", ":", "sp", "=", "self", "else", ":", "if", "not", "isinstance", "(", "band", ",", "SpectralElement", ")", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Invalid bandpass.'", ")", "stat", "=", "band", ".", "check_overlap", "(", "self", ",", "wavelengths", "=", "wavelengths", ")", "if", "stat", "==", "'none'", ":", "raise", "exceptions", ".", "DisjointError", "(", "'Spectrum and renormalization band are disjoint.'", ")", "elif", "'partial'", "in", "stat", ":", "if", "stat", "==", "'partial_most'", ":", "warn_str", "=", "'At least'", "elif", "stat", "==", "'partial_notmost'", "and", "force", ":", "warn_str", "=", "'Less than'", "else", ":", "raise", "exceptions", ".", "PartialOverlap", "(", "'Spectrum and renormalization band do not fully '", "'overlap. You may use force=True to force the '", "'renormalization to proceed.'", ")", "warn_str", "=", "(", "'Spectrum is not defined everywhere in renormalization '", "'bandpass. {0} 99% of the band throughput has '", "'data. Spectrum will be'", ")", ".", "format", "(", "warn_str", ")", "if", "self", ".", "force_extrapolation", "(", ")", ":", "warn_str", "=", "(", "'{0} extrapolated at constant '", "'value.'", ")", ".", "format", "(", "warn_str", ")", "else", ":", "warn_str", "=", "(", "'{0} evaluated outside pre-defined '", "'waveset.'", ")", ".", "format", "(", "warn_str", ")", "warnings", ".", "warn", "(", "warn_str", ",", "AstropyUserWarning", ")", "warndict", "[", "'PartialRenorm'", "]", "=", "warn_str", "elif", "stat", "!=", "'full'", ":", "# pragma: no cover", "raise", "exceptions", ".", "SynphotError", "(", "'Overlap result of {0} is unexpected.'", ".", "format", "(", "stat", ")", ")", "sp", "=", "self", ".", "__mul__", "(", "band", ")", "if", "not", "isinstance", "(", "renorm_val", ",", "u", ".", "Quantity", ")", ":", "renorm_val", "=", "renorm_val", "*", "self", ".", "_internal_flux_unit", "renorm_unit_name", "=", "renorm_val", ".", "unit", ".", "to_string", "(", ")", "w", "=", "sp", ".", "_validate_wavelengths", "(", "wavelengths", ")", "if", "(", "renorm_val", ".", "unit", "==", "u", ".", "count", "or", "renorm_unit_name", "==", "units", ".", "OBMAG", ".", "to_string", "(", ")", ")", ":", "# Special handling for non-density units", "flux_tmp", "=", "sp", "(", "w", ",", "flux_unit", "=", "u", ".", "count", ",", "area", "=", "area", ")", "totalflux", "=", "flux_tmp", ".", "sum", "(", ")", ".", "value", "stdflux", "=", "1.0", "else", ":", "totalflux", "=", "sp", ".", "integrate", "(", "wavelengths", "=", "wavelengths", ")", ".", "value", "# VEGAMAG", "if", "renorm_unit_name", "==", "units", ".", "VEGAMAG", ".", "to_string", "(", ")", ":", "if", "not", "isinstance", "(", "vegaspec", ",", "SourceSpectrum", ")", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Vega spectrum is missing.'", ")", "stdspec", "=", "vegaspec", "# Magnitude flux-density units", "elif", "renorm_val", ".", "unit", "in", "(", "u", ".", "STmag", ",", "u", ".", "ABmag", ")", ":", "stdspec", "=", "SourceSpectrum", "(", "ConstFlux1D", ",", "amplitude", "=", "(", "0", "*", "renorm_val", ".", "unit", ")", ")", "# Linear flux-density units", "else", ":", "stdspec", "=", "SourceSpectrum", "(", "ConstFlux1D", ",", "amplitude", "=", "(", "1", "*", "renorm_val", ".", "unit", ")", ")", "if", "band", "is", 
"None", ":", "# TODO: Cannot get this to agree with results", "# from using a very large box bandpass.", "# stdflux = stdspec.integrate(wavelengths=w).value", "raise", "NotImplementedError", "(", "'Must provide a bandpass'", ")", "else", ":", "up", "=", "stdspec", "*", "band", "stdflux", "=", "up", ".", "integrate", "(", "wavelengths", "=", "wavelengths", ")", ".", "value", "utils", ".", "validate_totalflux", "(", "totalflux", ")", "# Renormalize in magnitudes", "if", "(", "renorm_val", ".", "unit", ".", "decompose", "(", ")", "==", "u", ".", "mag", "or", "isinstance", "(", "renorm_val", ".", "unit", ",", "u", ".", "LogUnit", ")", ")", ":", "const", "=", "renorm_val", ".", "value", "+", "(", "2.5", "*", "np", ".", "log10", "(", "totalflux", "/", "stdflux", ")", ")", "newsp", "=", "self", ".", "__mul__", "(", "10", "**", "(", "-", "0.4", "*", "const", ")", ")", "# Renormalize in linear flux units", "else", ":", "const", "=", "renorm_val", ".", "value", "*", "(", "stdflux", "/", "totalflux", ")", "newsp", "=", "self", ".", "__mul__", "(", "const", ")", "newsp", ".", "warnings", "=", "warndict", "return", "newsp" ]
Renormalize the spectrum to the given Quantity and band. .. warning:: Redshift attribute (``z``) is reset to 0 in the normalized spectrum even if ``self.z`` is non-zero. This is because the normalization simply adds a scale factor to the existing composite model. This is confusing but should not affect the flux sampling. Parameters ---------- renorm_val : number or `~astropy.units.quantity.Quantity` Value to renormalize the spectrum to. If not a Quantity, assumed to be in internal unit. band : `SpectralElement` Bandpass to use in renormalization. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for renormalization. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. force : bool By default (`False`), renormalization is only done when band wavelength limits are within ``self`` or at least 99% of the flux is within the overlap. Set to `True` to force renormalization for partial overlap (this changes the underlying model of ``self`` to always extrapolate, if applicable). Disjoint bandpass raises an exception regardless. area, vegaspec See :func:`~synphot.units.convert_flux`. Returns ------- sp : obj Renormalized spectrum. Raises ------ synphot.exceptions.DisjointError Renormalization band does not overlap with ``self``. synphot.exceptions.PartialOverlap Renormalization band only partially overlaps with ``self`` and significant amount of flux falls outside the overlap. synphot.exceptions.SynphotError Invalid inputs or calculation failed.
[ "Renormalize", "the", "spectrum", "to", "the", "given", "Quantity", "and", "band", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L837-L995
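Both renormalization branches above reduce to multiplying ``self`` by a single scale factor. With F_tot the band-integrated (or count-summed) flux of ``self * band`` and F_std that of the standard spectrum (Vega for VEGAMAG, a constant zero-magnitude source for STmag/ABmag, a unit-amplitude constant source for linear flux densities, or simply 1 for count/OBMAG), the applied factor is

\[ s_{\rm mag} = 10^{-0.4\left(m + 2.5\log_{10}(F_{\rm tot}/F_{\rm std})\right)} = 10^{-0.4\,m}\,\frac{F_{\rm std}}{F_{\rm tot}} \qquad\text{or}\qquad s_{\rm lin} = v\,\frac{F_{\rm std}}{F_{\rm tot}}, \]

for a magnitude-like renormalization value m or a linear value v, respectively.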
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum._process_flux_param
def _process_flux_param(self, pval, wave): """Process individual model parameter representing flux.""" if isinstance(pval, u.Quantity): self._validate_flux_unit(pval.unit) outval = units.convert_flux(self._redshift_model(wave), pval, self._internal_flux_unit).value else: # Assume already in internal unit outval = pval return outval
python
def _process_flux_param(self, pval, wave): """Process individual model parameter representing flux.""" if isinstance(pval, u.Quantity): self._validate_flux_unit(pval.unit) outval = units.convert_flux(self._redshift_model(wave), pval, self._internal_flux_unit).value else: # Assume already in internal unit outval = pval return outval
[ "def", "_process_flux_param", "(", "self", ",", "pval", ",", "wave", ")", ":", "if", "isinstance", "(", "pval", ",", "u", ".", "Quantity", ")", ":", "self", ".", "_validate_flux_unit", "(", "pval", ".", "unit", ")", "outval", "=", "units", ".", "convert_flux", "(", "self", ".", "_redshift_model", "(", "wave", ")", ",", "pval", ",", "self", ".", "_internal_flux_unit", ")", ".", "value", "else", ":", "# Assume already in internal unit", "outval", "=", "pval", "return", "outval" ]
Process individual model parameter representing flux.
[ "Process", "individual", "model", "parameter", "representing", "flux", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1024-L1032
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum.model
def model(self): """Model of the spectrum with given redshift.""" if self.z == 0: m = self._model else: # wavelength if self._internal_wave_unit.physical_type == 'length': rs = self._redshift_model.inverse # frequency or wavenumber # NOTE: This will never execute as long as internal wavelength # unit remains Angstrom. else: # pragma: no cover rs = self._redshift_model if self.z_type == 'wavelength_only': m = rs | self._model else: # conserve_flux m = rs | self._model | self._redshift_flux_model return m
python
def model(self): """Model of the spectrum with given redshift.""" if self.z == 0: m = self._model else: # wavelength if self._internal_wave_unit.physical_type == 'length': rs = self._redshift_model.inverse # frequency or wavenumber # NOTE: This will never execute as long as internal wavelength # unit remains Angstrom. else: # pragma: no cover rs = self._redshift_model if self.z_type == 'wavelength_only': m = rs | self._model else: # conserve_flux m = rs | self._model | self._redshift_flux_model return m
[ "def", "model", "(", "self", ")", ":", "if", "self", ".", "z", "==", "0", ":", "m", "=", "self", ".", "_model", "else", ":", "# wavelength", "if", "self", ".", "_internal_wave_unit", ".", "physical_type", "==", "'length'", ":", "rs", "=", "self", ".", "_redshift_model", ".", "inverse", "# frequency or wavenumber", "# NOTE: This will never execute as long as internal wavelength", "# unit remains Angstrom.", "else", ":", "# pragma: no cover", "rs", "=", "self", ".", "_redshift_model", "if", "self", ".", "z_type", "==", "'wavelength_only'", ":", "m", "=", "rs", "|", "self", ".", "_model", "else", ":", "# conserve_flux", "m", "=", "rs", "|", "self", ".", "_model", "|", "self", ".", "_redshift_flux_model", "return", "m" ]
Model of the spectrum with given redshift.
[ "Model", "of", "the", "spectrum", "with", "given", "redshift", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1035-L1054
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum.z
def z(self, what): """Change redshift.""" if not isinstance(what, numbers.Real): raise exceptions.SynphotError( 'Redshift must be a real scalar number.') self._z = float(what) self._redshift_model = RedshiftScaleFactor(self._z) if self.z_type == 'wavelength_only': self._redshift_flux_model = None else: # conserve_flux self._redshift_flux_model = Scale(1 / (1 + self._z))
python
def z(self, what): """Change redshift.""" if not isinstance(what, numbers.Real): raise exceptions.SynphotError( 'Redshift must be a real scalar number.') self._z = float(what) self._redshift_model = RedshiftScaleFactor(self._z) if self.z_type == 'wavelength_only': self._redshift_flux_model = None else: # conserve_flux self._redshift_flux_model = Scale(1 / (1 + self._z))
[ "def", "z", "(", "self", ",", "what", ")", ":", "if", "not", "isinstance", "(", "what", ",", "numbers", ".", "Real", ")", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Redshift must be a real scalar number.'", ")", "self", ".", "_z", "=", "float", "(", "what", ")", "self", ".", "_redshift_model", "=", "RedshiftScaleFactor", "(", "self", ".", "_z", ")", "if", "self", ".", "z_type", "==", "'wavelength_only'", ":", "self", ".", "_redshift_flux_model", "=", "None", "else", ":", "# conserve_flux", "self", ".", "_redshift_flux_model", "=", "Scale", "(", "1", "/", "(", "1", "+", "self", ".", "_z", ")", ")" ]
Change redshift.
[ "Change", "redshift", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1062-L1072
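Taken together, the ``model`` property and the ``z`` setter above implement the two redshift modes: ``RedshiftScaleFactor(z).inverse`` maps an observed wavelength back to the rest frame, and for ``z_type='conserve_flux'`` the extra ``Scale(1 / (1 + z))`` divides the sampled flux by (1 + z), i.e.

\[ \lambda_{\rm obs} = (1+z)\,\lambda_{\rm rest}, \qquad f_{\rm obs}(\lambda_{\rm obs}) = \frac{f_{\rm rest}\bigl(\lambda_{\rm obs}/(1+z)\bigr)}{1+z}, \]

while ``z_type='wavelength_only'`` applies only the wavelength mapping.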
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum._validate_other_add_sub
def _validate_other_add_sub(self, other): """Conditions for other to satisfy before add/sub.""" if not isinstance(other, self.__class__): raise exceptions.IncompatibleSources( 'Can only operate on {0}.'.format(self.__class__.__name__))
python
def _validate_other_add_sub(self, other): """Conditions for other to satisfy before add/sub.""" if not isinstance(other, self.__class__): raise exceptions.IncompatibleSources( 'Can only operate on {0}.'.format(self.__class__.__name__))
[ "def", "_validate_other_add_sub", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "self", ".", "__class__", ")", ":", "raise", "exceptions", ".", "IncompatibleSources", "(", "'Can only operate on {0}.'", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ")", ")" ]
Conditions for other to satisfy before add/sub.
[ "Conditions", "for", "other", "to", "satisfy", "before", "add", "/", "sub", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1092-L1096
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum.plot
def plot(self, wavelengths=None, flux_unit=None, area=None, vegaspec=None, **kwargs): # pragma: no cover """Plot the spectrum. .. note:: Uses :mod:`matplotlib`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit for plotting. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict See :func:`BaseSpectrum.plot`. Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area, vegaspec=vegaspec) self._do_plot(w, y, **kwargs)
python
def plot(self, wavelengths=None, flux_unit=None, area=None, vegaspec=None, **kwargs): # pragma: no cover """Plot the spectrum. .. note:: Uses :mod:`matplotlib`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit for plotting. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict See :func:`BaseSpectrum.plot`. Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area, vegaspec=vegaspec) self._do_plot(w, y, **kwargs)
[ "def", "plot", "(", "self", ",", "wavelengths", "=", "None", ",", "flux_unit", "=", "None", ",", "area", "=", "None", ",", "vegaspec", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# pragma: no cover", "w", ",", "y", "=", "self", ".", "_get_arrays", "(", "wavelengths", ",", "flux_unit", "=", "flux_unit", ",", "area", "=", "area", ",", "vegaspec", "=", "vegaspec", ")", "self", ".", "_do_plot", "(", "w", ",", "y", ",", "*", "*", "kwargs", ")" ]
Plot the spectrum. .. note:: Uses :mod:`matplotlib`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit for plotting. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict See :func:`BaseSpectrum.plot`. Raises ------ synphot.exceptions.SynphotError Invalid inputs.
[ "Plot", "the", "spectrum", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1141-L1172
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum.to_fits
def to_fits(self, filename, wavelengths=None, flux_unit=None, area=None, vegaspec=None, **kwargs): """Write the spectrum to a FITS file. Parameters ---------- filename : str Output filename. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit before written out. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict Keywords accepted by :func:`~synphot.specio.write_fits_spec`. """ w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area, vegaspec=vegaspec) # There are some standard keywords that should be added # to the extension header. bkeys = {'tdisp1': 'G15.7', 'tdisp2': 'G15.7'} if 'expr' in self.meta: bkeys['expr'] = (self.meta['expr'], 'synphot expression') if 'ext_header' in kwargs: kwargs['ext_header'].update(bkeys) else: kwargs['ext_header'] = bkeys specio.write_fits_spec(filename, w, y, **kwargs)
python
def to_fits(self, filename, wavelengths=None, flux_unit=None, area=None, vegaspec=None, **kwargs): """Write the spectrum to a FITS file. Parameters ---------- filename : str Output filename. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit before written out. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict Keywords accepted by :func:`~synphot.specio.write_fits_spec`. """ w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area, vegaspec=vegaspec) # There are some standard keywords that should be added # to the extension header. bkeys = {'tdisp1': 'G15.7', 'tdisp2': 'G15.7'} if 'expr' in self.meta: bkeys['expr'] = (self.meta['expr'], 'synphot expression') if 'ext_header' in kwargs: kwargs['ext_header'].update(bkeys) else: kwargs['ext_header'] = bkeys specio.write_fits_spec(filename, w, y, **kwargs)
[ "def", "to_fits", "(", "self", ",", "filename", ",", "wavelengths", "=", "None", ",", "flux_unit", "=", "None", ",", "area", "=", "None", ",", "vegaspec", "=", "None", ",", "*", "*", "kwargs", ")", ":", "w", ",", "y", "=", "self", ".", "_get_arrays", "(", "wavelengths", ",", "flux_unit", "=", "flux_unit", ",", "area", "=", "area", ",", "vegaspec", "=", "vegaspec", ")", "# There are some standard keywords that should be added", "# to the extension header.", "bkeys", "=", "{", "'tdisp1'", ":", "'G15.7'", ",", "'tdisp2'", ":", "'G15.7'", "}", "if", "'expr'", "in", "self", ".", "meta", ":", "bkeys", "[", "'expr'", "]", "=", "(", "self", ".", "meta", "[", "'expr'", "]", ",", "'synphot expression'", ")", "if", "'ext_header'", "in", "kwargs", ":", "kwargs", "[", "'ext_header'", "]", ".", "update", "(", "bkeys", ")", "else", ":", "kwargs", "[", "'ext_header'", "]", "=", "bkeys", "specio", ".", "write_fits_spec", "(", "filename", ",", "w", ",", "y", ",", "*", "*", "kwargs", ")" ]
Write the spectrum to a FITS file. Parameters ---------- filename : str Output filename. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit before written out. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict Keywords accepted by :func:`~synphot.specio.write_fits_spec`.
[ "Write", "the", "spectrum", "to", "a", "FITS", "file", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1174-L1214
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum.from_file
def from_file(cls, filename, keep_neg=False, **kwargs): """Create a spectrum from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Spectrum filename. keep_neg : bool See `~synphot.models.Empirical1D`. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- sp : `SourceSpectrum` Empirical spectrum. """ header, wavelengths, fluxes = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=fluxes, keep_neg=keep_neg, meta={'header': header})
python
def from_file(cls, filename, keep_neg=False, **kwargs): """Create a spectrum from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Spectrum filename. keep_neg : bool See `~synphot.models.Empirical1D`. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- sp : `SourceSpectrum` Empirical spectrum. """ header, wavelengths, fluxes = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=fluxes, keep_neg=keep_neg, meta={'header': header})
[ "def", "from_file", "(", "cls", ",", "filename", ",", "keep_neg", "=", "False", ",", "*", "*", "kwargs", ")", ":", "header", ",", "wavelengths", ",", "fluxes", "=", "specio", ".", "read_spec", "(", "filename", ",", "*", "*", "kwargs", ")", "return", "cls", "(", "Empirical1D", ",", "points", "=", "wavelengths", ",", "lookup_table", "=", "fluxes", ",", "keep_neg", "=", "keep_neg", ",", "meta", "=", "{", "'header'", ":", "header", "}", ")" ]
Create a spectrum from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Spectrum filename. keep_neg : bool See `~synphot.models.Empirical1D`. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- sp : `SourceSpectrum` Empirical spectrum.
[ "Create", "a", "spectrum", "from", "file", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1217-L1244
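A minimal usage sketch tying the pieces above together; the filename ``my_spectrum.fits`` is hypothetical (not a file from this repository) and matplotlib is needed for the plot call:

from synphot import SourceSpectrum, units

# Load an empirical spectrum from a FITS table (hypothetical file name).
sp = SourceSpectrum.from_file('my_spectrum.fits')

# Sample it on its own waveset in FLAM, then plot with a log flux axis.
flux = sp(sp.waveset, flux_unit=units.FLAM)
sp.plot(ylog=True, title='Empirical source spectrum')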
spacetelescope/synphot_refactor
synphot/spectrum.py
SourceSpectrum.from_vega
def from_vega(cls, **kwargs): """Load :ref:`Vega spectrum <synphot-vega-spec>`. Parameters ---------- kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_remote_spec`. Returns ------- vegaspec : `SourceSpectrum` Empirical Vega spectrum. """ filename = conf.vega_file header, wavelengths, fluxes = specio.read_remote_spec( filename, **kwargs) header['filename'] = filename meta = {'header': header, 'expr': 'Vega from {0}'.format(os.path.basename(filename))} return cls(Empirical1D, points=wavelengths, lookup_table=fluxes, meta=meta)
python
def from_vega(cls, **kwargs): """Load :ref:`Vega spectrum <synphot-vega-spec>`. Parameters ---------- kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_remote_spec`. Returns ------- vegaspec : `SourceSpectrum` Empirical Vega spectrum. """ filename = conf.vega_file header, wavelengths, fluxes = specio.read_remote_spec( filename, **kwargs) header['filename'] = filename meta = {'header': header, 'expr': 'Vega from {0}'.format(os.path.basename(filename))} return cls(Empirical1D, points=wavelengths, lookup_table=fluxes, meta=meta)
[ "def", "from_vega", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "filename", "=", "conf", ".", "vega_file", "header", ",", "wavelengths", ",", "fluxes", "=", "specio", ".", "read_remote_spec", "(", "filename", ",", "*", "*", "kwargs", ")", "header", "[", "'filename'", "]", "=", "filename", "meta", "=", "{", "'header'", ":", "header", ",", "'expr'", ":", "'Vega from {0}'", ".", "format", "(", "os", ".", "path", ".", "basename", "(", "filename", ")", ")", "}", "return", "cls", "(", "Empirical1D", ",", "points", "=", "wavelengths", ",", "lookup_table", "=", "fluxes", ",", "meta", "=", "meta", ")" ]
Load :ref:`Vega spectrum <synphot-vega-spec>`. Parameters ---------- kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_remote_spec`. Returns ------- vegaspec : `SourceSpectrum` Empirical Vega spectrum.
[ "Load", ":", "ref", ":", "Vega", "spectrum", "<synphot", "-", "vega", "-", "spec", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1247-L1268
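Usage sketch for SourceSpectrum.from_vega: the method downloads the file configured in conf.vega_file, so the first call assumes network access (or a local cache of the remote data).

from synphot import SourceSpectrum

vega = SourceSpectrum.from_vega()
print(vega.meta['expr'])   # 'Vega from <basename of the configured file>'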
spacetelescope/synphot_refactor
synphot/spectrum.py
BaseUnitlessSpectrum._validate_flux_unit
def _validate_flux_unit(new_unit): # pragma: no cover """Make sure flux unit is valid.""" new_unit = units.validate_unit(new_unit) if new_unit.decompose() != u.dimensionless_unscaled: raise exceptions.SynphotError( 'Unit {0} is not dimensionless'.format(new_unit)) return new_unit
python
def _validate_flux_unit(new_unit): # pragma: no cover """Make sure flux unit is valid.""" new_unit = units.validate_unit(new_unit) if new_unit.decompose() != u.dimensionless_unscaled: raise exceptions.SynphotError( 'Unit {0} is not dimensionless'.format(new_unit)) return new_unit
[ "def", "_validate_flux_unit", "(", "new_unit", ")", ":", "# pragma: no cover", "new_unit", "=", "units", ".", "validate_unit", "(", "new_unit", ")", "if", "new_unit", ".", "decompose", "(", ")", "!=", "u", ".", "dimensionless_unscaled", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Unit {0} is not dimensionless'", ".", "format", "(", "new_unit", ")", ")", "return", "new_unit" ]
Make sure flux unit is valid.
[ "Make", "sure", "flux", "unit", "is", "valid", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1280-L1288
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.check_overlap
def check_overlap(self, other, wavelengths=None, threshold=0.01): """Check for wavelength overlap between two spectra. Only wavelengths where ``self`` throughput is non-zero are considered. Example of full overlap:: |---------- other ----------| |------ self ------| Examples of partial overlap:: |---------- self ----------| |------ other ------| |---- other ----| |---- self ----| |---- self ----| |---- other ----| Examples of no overlap:: |---- self ----| |---- other ----| |---- other ----| |---- self ----| Parameters ---------- other : `BaseSpectrum` wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. threshold : float If less than this fraction of flux or throughput falls outside wavelength overlap, the *lack* of overlap is *insignificant*. This is only used when partial overlap is detected. Default is 1%. Returns ------- result : {'full', 'partial_most', 'partial_notmost', 'none'} * 'full' - ``self`` coverage is within or same as ``other`` * 'partial_most' - Less than ``threshold`` fraction of ``self`` flux is outside the overlapping wavelength region, i.e., the *lack* of overlap is *insignificant* * 'partial_notmost' - ``self`` partially overlaps with ``other`` but does not qualify for 'partial_most' * 'none' - ``self`` does not overlap ``other`` Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ if not isinstance(other, BaseSpectrum): raise exceptions.SynphotError( 'other must be spectrum or bandpass.') # Special cases where no sampling wavelengths given and # one of the inputs is continuous. if wavelengths is None: if other.waveset is None: return 'full' if self.waveset is None: return 'partial_notmost' x1 = self._validate_wavelengths(wavelengths) y1 = self(x1) a = x1[y1 > 0].value b = other._validate_wavelengths(wavelengths).value result = utils.overlap_status(a, b) if result == 'partial': # If there is no need to extrapolate or taper other # (i.e., other is zero at self's wave limits), # then we consider it as a full coverage. # This logic assumes __call__ never returns mag or count! if ((isinstance(other.model, Empirical1D) and other.model.is_tapered() or not isinstance(other.model, (Empirical1D, _CompoundModel))) and np.allclose(other(x1[::x1.size - 1]).value, 0)): result = 'full' # Check if the lack of overlap is significant. else: # Get all the flux totalflux = self.integrate(wavelengths=wavelengths).value utils.validate_totalflux(totalflux) a_min, a_max = a.min(), a.max() b_min, b_max = b.min(), b.max() # Now get the other two pieces excluded = 0.0 if a_min < b_min: excluded += self.integrate( wavelengths=np.array([a_min, b_min])).value if a_max > b_max: excluded += self.integrate( wavelengths=np.array([b_max, a_max])).value if excluded / totalflux < threshold: result = 'partial_most' else: result = 'partial_notmost' return result
python
def check_overlap(self, other, wavelengths=None, threshold=0.01): """Check for wavelength overlap between two spectra. Only wavelengths where ``self`` throughput is non-zero are considered. Example of full overlap:: |---------- other ----------| |------ self ------| Examples of partial overlap:: |---------- self ----------| |------ other ------| |---- other ----| |---- self ----| |---- self ----| |---- other ----| Examples of no overlap:: |---- self ----| |---- other ----| |---- other ----| |---- self ----| Parameters ---------- other : `BaseSpectrum` wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. threshold : float If less than this fraction of flux or throughput falls outside wavelength overlap, the *lack* of overlap is *insignificant*. This is only used when partial overlap is detected. Default is 1%. Returns ------- result : {'full', 'partial_most', 'partial_notmost', 'none'} * 'full' - ``self`` coverage is within or same as ``other`` * 'partial_most' - Less than ``threshold`` fraction of ``self`` flux is outside the overlapping wavelength region, i.e., the *lack* of overlap is *insignificant* * 'partial_notmost' - ``self`` partially overlaps with ``other`` but does not qualify for 'partial_most' * 'none' - ``self`` does not overlap ``other`` Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ if not isinstance(other, BaseSpectrum): raise exceptions.SynphotError( 'other must be spectrum or bandpass.') # Special cases where no sampling wavelengths given and # one of the inputs is continuous. if wavelengths is None: if other.waveset is None: return 'full' if self.waveset is None: return 'partial_notmost' x1 = self._validate_wavelengths(wavelengths) y1 = self(x1) a = x1[y1 > 0].value b = other._validate_wavelengths(wavelengths).value result = utils.overlap_status(a, b) if result == 'partial': # If there is no need to extrapolate or taper other # (i.e., other is zero at self's wave limits), # then we consider it as a full coverage. # This logic assumes __call__ never returns mag or count! if ((isinstance(other.model, Empirical1D) and other.model.is_tapered() or not isinstance(other.model, (Empirical1D, _CompoundModel))) and np.allclose(other(x1[::x1.size - 1]).value, 0)): result = 'full' # Check if the lack of overlap is significant. else: # Get all the flux totalflux = self.integrate(wavelengths=wavelengths).value utils.validate_totalflux(totalflux) a_min, a_max = a.min(), a.max() b_min, b_max = b.min(), b.max() # Now get the other two pieces excluded = 0.0 if a_min < b_min: excluded += self.integrate( wavelengths=np.array([a_min, b_min])).value if a_max > b_max: excluded += self.integrate( wavelengths=np.array([b_max, a_max])).value if excluded / totalflux < threshold: result = 'partial_most' else: result = 'partial_notmost' return result
[ "def", "check_overlap", "(", "self", ",", "other", ",", "wavelengths", "=", "None", ",", "threshold", "=", "0.01", ")", ":", "if", "not", "isinstance", "(", "other", ",", "BaseSpectrum", ")", ":", "raise", "exceptions", ".", "SynphotError", "(", "'other must be spectrum or bandpass.'", ")", "# Special cases where no sampling wavelengths given and", "# one of the inputs is continuous.", "if", "wavelengths", "is", "None", ":", "if", "other", ".", "waveset", "is", "None", ":", "return", "'full'", "if", "self", ".", "waveset", "is", "None", ":", "return", "'partial_notmost'", "x1", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", "y1", "=", "self", "(", "x1", ")", "a", "=", "x1", "[", "y1", ">", "0", "]", ".", "value", "b", "=", "other", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "result", "=", "utils", ".", "overlap_status", "(", "a", ",", "b", ")", "if", "result", "==", "'partial'", ":", "# If there is no need to extrapolate or taper other", "# (i.e., other is zero at self's wave limits),", "# then we consider it as a full coverage.", "# This logic assumes __call__ never returns mag or count!", "if", "(", "(", "isinstance", "(", "other", ".", "model", ",", "Empirical1D", ")", "and", "other", ".", "model", ".", "is_tapered", "(", ")", "or", "not", "isinstance", "(", "other", ".", "model", ",", "(", "Empirical1D", ",", "_CompoundModel", ")", ")", ")", "and", "np", ".", "allclose", "(", "other", "(", "x1", "[", ":", ":", "x1", ".", "size", "-", "1", "]", ")", ".", "value", ",", "0", ")", ")", ":", "result", "=", "'full'", "# Check if the lack of overlap is significant.", "else", ":", "# Get all the flux", "totalflux", "=", "self", ".", "integrate", "(", "wavelengths", "=", "wavelengths", ")", ".", "value", "utils", ".", "validate_totalflux", "(", "totalflux", ")", "a_min", ",", "a_max", "=", "a", ".", "min", "(", ")", ",", "a", ".", "max", "(", ")", "b_min", ",", "b_max", "=", "b", ".", "min", "(", ")", ",", "b", ".", "max", "(", ")", "# Now get the other two pieces", "excluded", "=", "0.0", "if", "a_min", "<", "b_min", ":", "excluded", "+=", "self", ".", "integrate", "(", "wavelengths", "=", "np", ".", "array", "(", "[", "a_min", ",", "b_min", "]", ")", ")", ".", "value", "if", "a_max", ">", "b_max", ":", "excluded", "+=", "self", ".", "integrate", "(", "wavelengths", "=", "np", ".", "array", "(", "[", "b_max", ",", "a_max", "]", ")", ")", ".", "value", "if", "excluded", "/", "totalflux", "<", "threshold", ":", "result", "=", "'partial_most'", "else", ":", "result", "=", "'partial_notmost'", "return", "result" ]
Check for wavelength overlap between two spectra. Only wavelengths where ``self`` throughput is non-zero are considered. Example of full overlap:: |---------- other ----------| |------ self ------| Examples of partial overlap:: |---------- self ----------| |------ other ------| |---- other ----| |---- self ----| |---- self ----| |---- other ----| Examples of no overlap:: |---- self ----| |---- other ----| |---- other ----| |---- self ----| Parameters ---------- other : `BaseSpectrum` wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, `waveset` is used. threshold : float If less than this fraction of flux or throughput falls outside wavelength overlap, the *lack* of overlap is *insignificant*. This is only used when partial overlap is detected. Default is 1%. Returns ------- result : {'full', 'partial_most', 'partial_notmost', 'none'} * 'full' - ``self`` coverage is within or same as ``other`` * 'partial_most' - Less than ``threshold`` fraction of ``self`` flux is outside the overlapping wavelength region, i.e., the *lack* of overlap is *insignificant* * 'partial_notmost' - ``self`` partially overlaps with ``other`` but does not qualify for 'partial_most' * 'none' - ``self`` does not overlap ``other`` Raises ------ synphot.exceptions.SynphotError Invalid inputs.
[ "Check", "for", "wavelength", "overlap", "between", "two", "spectra", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1333-L1448
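A minimal sketch for check_overlap using toy Empirical1D objects; the wavelengths and throughput values are made up. Here the bandpass is non-zero only inside the spectrum's wavelength range, so the result is 'full'.

import numpy as np
from synphot import SourceSpectrum, SpectralElement
from synphot.models import Empirical1D

wave = np.arange(3000., 8001., 10.)
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.where((wave > 4000) & (wave < 6000), 0.8, 0.0))
sp = SourceSpectrum(Empirical1D, points=np.array([3500., 5000., 6500.]),
                    lookup_table=np.array([1.0, 2.0, 1.0]))
print(bp.check_overlap(sp))   # 'full': the bandpass coverage lies inside the spectrum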
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.unit_response
def unit_response(self, area, wavelengths=None): """Calculate :ref:`unit response <synphot-formula-uresp>` of this bandpass. Parameters ---------- area : float or `~astropy.units.quantity.Quantity` Area that flux covers. If not a Quantity, assumed to be in :math:`cm^{2}`. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- uresp : `~astropy.units.quantity.Quantity` Flux (in FLAM) of a star that produces a response of one photon per second in this bandpass. """ a = units.validate_quantity(area, units.AREA) # Only correct if wavelengths are in Angstrom. x = self._validate_wavelengths(wavelengths).value y = self(x).value * x int_val = abs(np.trapz(y, x=x)) uresp = units.HC / (a.cgs * int_val) return uresp.value * units.FLAM
python
def unit_response(self, area, wavelengths=None): """Calculate :ref:`unit response <synphot-formula-uresp>` of this bandpass. Parameters ---------- area : float or `~astropy.units.quantity.Quantity` Area that flux covers. If not a Quantity, assumed to be in :math:`cm^{2}`. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- uresp : `~astropy.units.quantity.Quantity` Flux (in FLAM) of a star that produces a response of one photon per second in this bandpass. """ a = units.validate_quantity(area, units.AREA) # Only correct if wavelengths are in Angstrom. x = self._validate_wavelengths(wavelengths).value y = self(x).value * x int_val = abs(np.trapz(y, x=x)) uresp = units.HC / (a.cgs * int_val) return uresp.value * units.FLAM
[ "def", "unit_response", "(", "self", ",", "area", ",", "wavelengths", "=", "None", ")", ":", "a", "=", "units", ".", "validate_quantity", "(", "area", ",", "units", ".", "AREA", ")", "# Only correct if wavelengths are in Angstrom.", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", "(", "x", ")", ".", "value", "*", "x", "int_val", "=", "abs", "(", "np", ".", "trapz", "(", "y", ",", "x", "=", "x", ")", ")", "uresp", "=", "units", ".", "HC", "/", "(", "a", ".", "cgs", "*", "int_val", ")", "return", "uresp", ".", "value", "*", "units", ".", "FLAM" ]
Calculate :ref:`unit response <synphot-formula-uresp>` of this bandpass. Parameters ---------- area : float or `~astropy.units.quantity.Quantity` Area that flux covers. If not a Quantity, assumed to be in :math:`cm^{2}`. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- uresp : `~astropy.units.quantity.Quantity` Flux (in FLAM) of a star that produces a response of one photon per second in this bandpass.
[ "Calculate", ":", "ref", ":", "unit", "response", "<synphot", "-", "formula", "-", "uresp", ">", "of", "this", "bandpass", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1450-L1481
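Sketch for unit_response with a flat, box-like toy bandpass; the 45238.93 cm**2 collecting area is only an illustrative number.

import numpy as np
import astropy.units as u
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.arange(4000., 6001., 10.)
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.full(wave.size, 0.7))
area = 45238.93 * u.cm ** 2
print(bp.unit_response(area))   # Quantity in FLAM: flux of a source giving 1 count/s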
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.rmswidth
def rmswidth(self, wavelengths=None, threshold=None): """Calculate the :ref:`bandpass RMS width <synphot-formula-rmswidth>`. Not to be confused with :func:`photbw`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. threshold : float or `~astropy.units.quantity.Quantity`, optional Data points with throughput below this value are not included in the calculation. By default, all data points are included. Returns ------- rms_width : `~astropy.units.quantity.Quantity` RMS width of the bandpass. Raises ------ synphot.exceptions.SynphotError Threshold is invalid. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value if threshold is None: wave = x thru = y else: if (isinstance(threshold, numbers.Real) or (isinstance(threshold, u.Quantity) and threshold.unit == self._internal_flux_unit)): mask = y >= threshold else: raise exceptions.SynphotError( '{0} is not a valid threshold'.format(threshold)) wave = x[mask] thru = y[mask] a = self.avgwave(wavelengths=wavelengths).value num = np.trapz((wave - a) ** 2 * thru, x=wave) den = np.trapz(thru, x=wave) if den == 0: # pragma: no cover rms_width = 0.0 else: rms_width = np.sqrt(abs(num / den)) return rms_width * self._internal_wave_unit
python
def rmswidth(self, wavelengths=None, threshold=None): """Calculate the :ref:`bandpass RMS width <synphot-formula-rmswidth>`. Not to be confused with :func:`photbw`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. threshold : float or `~astropy.units.quantity.Quantity`, optional Data points with throughput below this value are not included in the calculation. By default, all data points are included. Returns ------- rms_width : `~astropy.units.quantity.Quantity` RMS width of the bandpass. Raises ------ synphot.exceptions.SynphotError Threshold is invalid. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value if threshold is None: wave = x thru = y else: if (isinstance(threshold, numbers.Real) or (isinstance(threshold, u.Quantity) and threshold.unit == self._internal_flux_unit)): mask = y >= threshold else: raise exceptions.SynphotError( '{0} is not a valid threshold'.format(threshold)) wave = x[mask] thru = y[mask] a = self.avgwave(wavelengths=wavelengths).value num = np.trapz((wave - a) ** 2 * thru, x=wave) den = np.trapz(thru, x=wave) if den == 0: # pragma: no cover rms_width = 0.0 else: rms_width = np.sqrt(abs(num / den)) return rms_width * self._internal_wave_unit
[ "def", "rmswidth", "(", "self", ",", "wavelengths", "=", "None", ",", "threshold", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", "(", "x", ")", ".", "value", "if", "threshold", "is", "None", ":", "wave", "=", "x", "thru", "=", "y", "else", ":", "if", "(", "isinstance", "(", "threshold", ",", "numbers", ".", "Real", ")", "or", "(", "isinstance", "(", "threshold", ",", "u", ".", "Quantity", ")", "and", "threshold", ".", "unit", "==", "self", ".", "_internal_flux_unit", ")", ")", ":", "mask", "=", "y", ">=", "threshold", "else", ":", "raise", "exceptions", ".", "SynphotError", "(", "'{0} is not a valid threshold'", ".", "format", "(", "threshold", ")", ")", "wave", "=", "x", "[", "mask", "]", "thru", "=", "y", "[", "mask", "]", "a", "=", "self", ".", "avgwave", "(", "wavelengths", "=", "wavelengths", ")", ".", "value", "num", "=", "np", ".", "trapz", "(", "(", "wave", "-", "a", ")", "**", "2", "*", "thru", ",", "x", "=", "wave", ")", "den", "=", "np", ".", "trapz", "(", "thru", ",", "x", "=", "wave", ")", "if", "den", "==", "0", ":", "# pragma: no cover", "rms_width", "=", "0.0", "else", ":", "rms_width", "=", "np", ".", "sqrt", "(", "abs", "(", "num", "/", "den", ")", ")", "return", "rms_width", "*", "self", ".", "_internal_wave_unit" ]
Calculate the :ref:`bandpass RMS width <synphot-formula-rmswidth>`. Not to be confused with :func:`photbw`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. threshold : float or `~astropy.units.quantity.Quantity`, optional Data points with throughput below this value are not included in the calculation. By default, all data points are included. Returns ------- rms_width : `~astropy.units.quantity.Quantity` RMS width of the bandpass. Raises ------ synphot.exceptions.SynphotError Threshold is invalid.
[ "Calculate", "the", ":", "ref", ":", "bandpass", "RMS", "width", "<synphot", "-", "formula", "-", "rmswidth", ">", ".", "Not", "to", "be", "confused", "with", ":", "func", ":", "photbw", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1483-L1536
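Sketch for rmswidth on a Gaussian-shaped toy bandpass; the threshold cut simply drops the faint wings from the calculation.

import numpy as np
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.linspace(4000., 6000., 201)
thru = np.exp(-0.5 * ((wave - 5000.) / 300.) ** 2)
bp = SpectralElement(Empirical1D, points=wave, lookup_table=thru)
print(bp.rmswidth())                 # all sampled points
print(bp.rmswidth(threshold=0.01))   # ignore points with throughput < 0.01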
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.fwhm
def fwhm(self, **kwargs): """Calculate :ref:`synphot-formula-fwhm` of equivalent gaussian. Parameters ---------- kwargs : dict See :func:`photbw`. Returns ------- fwhm_val : `~astropy.units.quantity.Quantity` FWHM of equivalent gaussian. """ return np.sqrt(8 * np.log(2)) * self.photbw(**kwargs)
python
def fwhm(self, **kwargs): """Calculate :ref:`synphot-formula-fwhm` of equivalent gaussian. Parameters ---------- kwargs : dict See :func:`photbw`. Returns ------- fwhm_val : `~astropy.units.quantity.Quantity` FWHM of equivalent gaussian. """ return np.sqrt(8 * np.log(2)) * self.photbw(**kwargs)
[ "def", "fwhm", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "np", ".", "sqrt", "(", "8", "*", "np", ".", "log", "(", "2", ")", ")", "*", "self", ".", "photbw", "(", "*", "*", "kwargs", ")" ]
Calculate :ref:`synphot-formula-fwhm` of equivalent gaussian. Parameters ---------- kwargs : dict See :func:`photbw`. Returns ------- fwhm_val : `~astropy.units.quantity.Quantity` FWHM of equivalent gaussian.
[ "Calculate", ":", "ref", ":", "synphot", "-", "formula", "-", "fwhm", "of", "equivalent", "gaussian", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1600-L1614
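Sketch for fwhm: by definition it is sqrt(8 ln 2) ~ 2.3548 times photbw, as the toy bandpass below confirms.

import numpy as np
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.linspace(4000., 6000., 201)
thru = np.exp(-0.5 * ((wave - 5000.) / 300.) ** 2)
bp = SpectralElement(Empirical1D, points=wave, lookup_table=thru)
print(bp.fwhm())                              # ~2.3548 * photometric bandwidth
print(np.sqrt(8 * np.log(2)) * bp.photbw())   # identical by construction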
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.tpeak
def tpeak(self, wavelengths=None): """Calculate :ref:`peak bandpass throughput <synphot-formula-tpeak>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- tpeak : `~astropy.units.quantity.Quantity` Peak bandpass throughput. """ x = self._validate_wavelengths(wavelengths).value return self(x).max()
python
def tpeak(self, wavelengths=None): """Calculate :ref:`peak bandpass throughput <synphot-formula-tpeak>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- tpeak : `~astropy.units.quantity.Quantity` Peak bandpass throughput. """ x = self._validate_wavelengths(wavelengths).value return self(x).max()
[ "def", "tpeak", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "return", "self", "(", "x", ")", ".", "max", "(", ")" ]
Calculate :ref:`peak bandpass throughput <synphot-formula-tpeak>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- tpeak : `~astropy.units.quantity.Quantity` Peak bandpass throughput.
[ "Calculate", ":", "ref", ":", "peak", "bandpass", "throughput", "<synphot", "-", "formula", "-", "tpeak", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1633-L1650
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.wpeak
def wpeak(self, wavelengths=None): """Calculate :ref:`wavelength at peak throughput <synphot-formula-tpeak>`. If there are multiple data points with peak throughput value, only the first match is returned. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- wpeak : `~astropy.units.quantity.Quantity` Wavelength at peak throughput. """ x = self._validate_wavelengths(wavelengths) return x[self(x) == self.tpeak(wavelengths=wavelengths)][0]
python
def wpeak(self, wavelengths=None): """Calculate :ref:`wavelength at peak throughput <synphot-formula-tpeak>`. If there are multiple data points with peak throughput value, only the first match is returned. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- wpeak : `~astropy.units.quantity.Quantity` Wavelength at peak throughput. """ x = self._validate_wavelengths(wavelengths) return x[self(x) == self.tpeak(wavelengths=wavelengths)][0]
[ "def", "wpeak", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", "return", "x", "[", "self", "(", "x", ")", "==", "self", ".", "tpeak", "(", "wavelengths", "=", "wavelengths", ")", "]", "[", "0", "]" ]
Calculate :ref:`wavelength at peak throughput <synphot-formula-tpeak>`. If there are multiple data points with peak throughput value, only the first match is returned. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- wpeak : `~astropy.units.quantity.Quantity` Wavelength at peak throughput.
[ "Calculate", ":", "ref", ":", "wavelength", "at", "peak", "throughput", "<synphot", "-", "formula", "-", "tpeak", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1652-L1673
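Sketch covering both the tpeak and wpeak entries with a small triangular toy bandpass; the values are made up.

import numpy as np
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.array([4000., 4500., 5000., 5500., 6000.])
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.array([0.0, 0.4, 0.9, 0.4, 0.0]))
print(bp.tpeak())   # 0.9 (dimensionless peak throughput)
print(bp.wpeak())   # 5000.0 Angstrom, the first wavelength at that peak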
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.rectwidth
def rectwidth(self, wavelengths=None): """Calculate :ref:`bandpass rectangular width <synphot-formula-rectw>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- rectw : `~astropy.units.quantity.Quantity` Bandpass rectangular width. """ equvw = self.equivwidth(wavelengths=wavelengths) tpeak = self.tpeak(wavelengths=wavelengths) if tpeak.value == 0: # pragma: no cover rectw = 0.0 * self._internal_wave_unit else: rectw = equvw / tpeak return rectw
python
def rectwidth(self, wavelengths=None): """Calculate :ref:`bandpass rectangular width <synphot-formula-rectw>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- rectw : `~astropy.units.quantity.Quantity` Bandpass rectangular width. """ equvw = self.equivwidth(wavelengths=wavelengths) tpeak = self.tpeak(wavelengths=wavelengths) if tpeak.value == 0: # pragma: no cover rectw = 0.0 * self._internal_wave_unit else: rectw = equvw / tpeak return rectw
[ "def", "rectwidth", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "equvw", "=", "self", ".", "equivwidth", "(", "wavelengths", "=", "wavelengths", ")", "tpeak", "=", "self", ".", "tpeak", "(", "wavelengths", "=", "wavelengths", ")", "if", "tpeak", ".", "value", "==", "0", ":", "# pragma: no cover", "rectw", "=", "0.0", "*", "self", ".", "_internal_wave_unit", "else", ":", "rectw", "=", "equvw", "/", "tpeak", "return", "rectw" ]
Calculate :ref:`bandpass rectangular width <synphot-formula-rectw>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- rectw : `~astropy.units.quantity.Quantity` Bandpass rectangular width.
[ "Calculate", ":", "ref", ":", "bandpass", "rectangular", "width", "<synphot", "-", "formula", "-", "rectw", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1693-L1717
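Sketch for rectwidth using the same kind of toy bandpass; it also spells out the identity rectwidth = equivwidth / tpeak implemented in the code above.

import numpy as np
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.array([4000., 4500., 5000., 5500., 6000.])
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.array([0.0, 0.4, 0.9, 0.4, 0.0]))
print(bp.rectwidth())                 # rectangular width in Angstrom
print(bp.equivwidth() / bp.tpeak())   # same value, by definition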
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.efficiency
def efficiency(self, wavelengths=None): """Calculate :ref:`dimensionless efficiency <synphot-formula-qtlam>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- qtlam : `~astropy.units.quantity.Quantity` Dimensionless efficiency. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value qtlam = abs(np.trapz(y / x, x=x)) return qtlam * u.dimensionless_unscaled
python
def efficiency(self, wavelengths=None): """Calculate :ref:`dimensionless efficiency <synphot-formula-qtlam>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- qtlam : `~astropy.units.quantity.Quantity` Dimensionless efficiency. """ x = self._validate_wavelengths(wavelengths).value y = self(x).value qtlam = abs(np.trapz(y / x, x=x)) return qtlam * u.dimensionless_unscaled
[ "def", "efficiency", "(", "self", ",", "wavelengths", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", "(", "x", ")", ".", "value", "qtlam", "=", "abs", "(", "np", ".", "trapz", "(", "y", "/", "x", ",", "x", "=", "x", ")", ")", "return", "qtlam", "*", "u", ".", "dimensionless_unscaled" ]
Calculate :ref:`dimensionless efficiency <synphot-formula-qtlam>`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. Returns ------- qtlam : `~astropy.units.quantity.Quantity` Dimensionless efficiency.
[ "Calculate", ":", "ref", ":", "dimensionless", "efficiency", "<synphot", "-", "formula", "-", "qtlam", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1719-L1738
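Sketch for efficiency (the dimensionless qtlam integral) on a flat toy bandpass; the numbers are illustrative only.

import numpy as np
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.arange(4000., 6001., 10.)
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.full(wave.size, 0.7))
print(bp.efficiency())   # integral of T(lam) / lam over the bandpass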
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.emflx
def emflx(self, area, wavelengths=None): """Calculate :ref:`equivalent monochromatic flux <synphot-formula-emflx>`. Parameters ---------- area, wavelengths See :func:`unit_response`. Returns ------- em_flux : `~astropy.units.quantity.Quantity` Equivalent monochromatic flux. """ t_lambda = self.tlambda(wavelengths=wavelengths) if t_lambda == 0: # pragma: no cover em_flux = 0.0 * units.FLAM else: uresp = self.unit_response(area, wavelengths=wavelengths) equvw = self.equivwidth(wavelengths=wavelengths).value em_flux = uresp * equvw / t_lambda return em_flux
python
def emflx(self, area, wavelengths=None): """Calculate :ref:`equivalent monochromatic flux <synphot-formula-emflx>`. Parameters ---------- area, wavelengths See :func:`unit_response`. Returns ------- em_flux : `~astropy.units.quantity.Quantity` Equivalent monochromatic flux. """ t_lambda = self.tlambda(wavelengths=wavelengths) if t_lambda == 0: # pragma: no cover em_flux = 0.0 * units.FLAM else: uresp = self.unit_response(area, wavelengths=wavelengths) equvw = self.equivwidth(wavelengths=wavelengths).value em_flux = uresp * equvw / t_lambda return em_flux
[ "def", "emflx", "(", "self", ",", "area", ",", "wavelengths", "=", "None", ")", ":", "t_lambda", "=", "self", ".", "tlambda", "(", "wavelengths", "=", "wavelengths", ")", "if", "t_lambda", "==", "0", ":", "# pragma: no cover", "em_flux", "=", "0.0", "*", "units", ".", "FLAM", "else", ":", "uresp", "=", "self", ".", "unit_response", "(", "area", ",", "wavelengths", "=", "wavelengths", ")", "equvw", "=", "self", ".", "equivwidth", "(", "wavelengths", "=", "wavelengths", ")", ".", "value", "em_flux", "=", "uresp", "*", "equvw", "/", "t_lambda", "return", "em_flux" ]
Calculate :ref:`equivalent monochromatic flux <synphot-formula-emflx>`. Parameters ---------- area, wavelengths See :func:`unit_response`. Returns ------- em_flux : `~astropy.units.quantity.Quantity` Equivalent monochromatic flux.
[ "Calculate", ":", "ref", ":", "equivalent", "monochromatic", "flux", "<synphot", "-", "formula", "-", "emflx", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1740-L1764
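Sketch for emflx; as in the unit_response sketch, the collecting area is an arbitrary illustrative value.

import numpy as np
import astropy.units as u
from synphot import SpectralElement
from synphot.models import Empirical1D

wave = np.arange(4000., 6001., 10.)
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.full(wave.size, 0.7))
print(bp.emflx(45238.93 * u.cm ** 2))   # equivalent monochromatic flux, in FLAM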
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.from_file
def from_file(cls, filename, **kwargs): """Creates a bandpass from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Bandpass filename. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- bp : `SpectralElement` Empirical bandpass. """ if 'flux_unit' not in kwargs: kwargs['flux_unit'] = cls._internal_flux_unit if ((filename.endswith('fits') or filename.endswith('fit')) and 'flux_col' not in kwargs): kwargs['flux_col'] = 'THROUGHPUT' header, wavelengths, throughput = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=throughput, keep_neg=True, meta={'header': header})
python
def from_file(cls, filename, **kwargs): """Creates a bandpass from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Bandpass filename. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- bp : `SpectralElement` Empirical bandpass. """ if 'flux_unit' not in kwargs: kwargs['flux_unit'] = cls._internal_flux_unit if ((filename.endswith('fits') or filename.endswith('fit')) and 'flux_col' not in kwargs): kwargs['flux_col'] = 'THROUGHPUT' header, wavelengths, throughput = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=throughput, keep_neg=True, meta={'header': header})
[ "def", "from_file", "(", "cls", ",", "filename", ",", "*", "*", "kwargs", ")", ":", "if", "'flux_unit'", "not", "in", "kwargs", ":", "kwargs", "[", "'flux_unit'", "]", "=", "cls", ".", "_internal_flux_unit", "if", "(", "(", "filename", ".", "endswith", "(", "'fits'", ")", "or", "filename", ".", "endswith", "(", "'fit'", ")", ")", "and", "'flux_col'", "not", "in", "kwargs", ")", ":", "kwargs", "[", "'flux_col'", "]", "=", "'THROUGHPUT'", "header", ",", "wavelengths", ",", "throughput", "=", "specio", ".", "read_spec", "(", "filename", ",", "*", "*", "kwargs", ")", "return", "cls", "(", "Empirical1D", ",", "points", "=", "wavelengths", ",", "lookup_table", "=", "throughput", ",", "keep_neg", "=", "True", ",", "meta", "=", "{", "'header'", ":", "header", "}", ")" ]
Creates a bandpass from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Bandpass filename. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- bp : `SpectralElement` Empirical bandpass.
[ "Creates", "a", "bandpass", "from", "file", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1805-L1836
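Usage sketch for SpectralElement.from_file; the paths and the alternative column name are hypothetical. For FITS input the throughput column defaults to 'THROUGHPUT' and can be overridden with flux_col.

from synphot import SpectralElement

bp = SpectralElement.from_file('my_throughput.fits')                    # FITS
bp_ascii = SpectralElement.from_file('my_throughput.txt')               # ASCII
bp_alt = SpectralElement.from_file('my_table.fits', flux_col='TRANS')   # non-default column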
spacetelescope/synphot_refactor
synphot/spectrum.py
SpectralElement.from_filter
def from_filter(cls, filtername, **kwargs): """Load :ref:`pre-defined filter bandpass <synphot-predefined-filter>`. Parameters ---------- filtername : str Filter name. Choose from 'bessel_j', 'bessel_h', 'bessel_k', 'cousins_r', 'cousins_i', 'johnson_u', 'johnson_b', 'johnson_v', 'johnson_r', 'johnson_i', 'johnson_j', or 'johnson_k'. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_remote_spec`. Returns ------- bp : `SpectralElement` Empirical bandpass. Raises ------ synphot.exceptions.SynphotError Invalid filter name. """ filtername = filtername.lower() # Select filename based on filter name if filtername == 'bessel_j': cfgitem = Conf.bessel_j_file elif filtername == 'bessel_h': cfgitem = Conf.bessel_h_file elif filtername == 'bessel_k': cfgitem = Conf.bessel_k_file elif filtername == 'cousins_r': cfgitem = Conf.cousins_r_file elif filtername == 'cousins_i': cfgitem = Conf.cousins_i_file elif filtername == 'johnson_u': cfgitem = Conf.johnson_u_file elif filtername == 'johnson_b': cfgitem = Conf.johnson_b_file elif filtername == 'johnson_v': cfgitem = Conf.johnson_v_file elif filtername == 'johnson_r': cfgitem = Conf.johnson_r_file elif filtername == 'johnson_i': cfgitem = Conf.johnson_i_file elif filtername == 'johnson_j': cfgitem = Conf.johnson_j_file elif filtername == 'johnson_k': cfgitem = Conf.johnson_k_file else: raise exceptions.SynphotError( 'Filter name {0} is invalid.'.format(filtername)) filename = cfgitem() if 'flux_unit' not in kwargs: kwargs['flux_unit'] = cls._internal_flux_unit if ((filename.endswith('fits') or filename.endswith('fit')) and 'flux_col' not in kwargs): kwargs['flux_col'] = 'THROUGHPUT' header, wavelengths, throughput = specio.read_remote_spec( filename, **kwargs) header['filename'] = filename header['descrip'] = cfgitem.description meta = {'header': header, 'expr': filtername} return cls(Empirical1D, points=wavelengths, lookup_table=throughput, meta=meta)
python
def from_filter(cls, filtername, **kwargs): """Load :ref:`pre-defined filter bandpass <synphot-predefined-filter>`. Parameters ---------- filtername : str Filter name. Choose from 'bessel_j', 'bessel_h', 'bessel_k', 'cousins_r', 'cousins_i', 'johnson_u', 'johnson_b', 'johnson_v', 'johnson_r', 'johnson_i', 'johnson_j', or 'johnson_k'. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_remote_spec`. Returns ------- bp : `SpectralElement` Empirical bandpass. Raises ------ synphot.exceptions.SynphotError Invalid filter name. """ filtername = filtername.lower() # Select filename based on filter name if filtername == 'bessel_j': cfgitem = Conf.bessel_j_file elif filtername == 'bessel_h': cfgitem = Conf.bessel_h_file elif filtername == 'bessel_k': cfgitem = Conf.bessel_k_file elif filtername == 'cousins_r': cfgitem = Conf.cousins_r_file elif filtername == 'cousins_i': cfgitem = Conf.cousins_i_file elif filtername == 'johnson_u': cfgitem = Conf.johnson_u_file elif filtername == 'johnson_b': cfgitem = Conf.johnson_b_file elif filtername == 'johnson_v': cfgitem = Conf.johnson_v_file elif filtername == 'johnson_r': cfgitem = Conf.johnson_r_file elif filtername == 'johnson_i': cfgitem = Conf.johnson_i_file elif filtername == 'johnson_j': cfgitem = Conf.johnson_j_file elif filtername == 'johnson_k': cfgitem = Conf.johnson_k_file else: raise exceptions.SynphotError( 'Filter name {0} is invalid.'.format(filtername)) filename = cfgitem() if 'flux_unit' not in kwargs: kwargs['flux_unit'] = cls._internal_flux_unit if ((filename.endswith('fits') or filename.endswith('fit')) and 'flux_col' not in kwargs): kwargs['flux_col'] = 'THROUGHPUT' header, wavelengths, throughput = specio.read_remote_spec( filename, **kwargs) header['filename'] = filename header['descrip'] = cfgitem.description meta = {'header': header, 'expr': filtername} return cls(Empirical1D, points=wavelengths, lookup_table=throughput, meta=meta)
[ "def", "from_filter", "(", "cls", ",", "filtername", ",", "*", "*", "kwargs", ")", ":", "filtername", "=", "filtername", ".", "lower", "(", ")", "# Select filename based on filter name", "if", "filtername", "==", "'bessel_j'", ":", "cfgitem", "=", "Conf", ".", "bessel_j_file", "elif", "filtername", "==", "'bessel_h'", ":", "cfgitem", "=", "Conf", ".", "bessel_h_file", "elif", "filtername", "==", "'bessel_k'", ":", "cfgitem", "=", "Conf", ".", "bessel_k_file", "elif", "filtername", "==", "'cousins_r'", ":", "cfgitem", "=", "Conf", ".", "cousins_r_file", "elif", "filtername", "==", "'cousins_i'", ":", "cfgitem", "=", "Conf", ".", "cousins_i_file", "elif", "filtername", "==", "'johnson_u'", ":", "cfgitem", "=", "Conf", ".", "johnson_u_file", "elif", "filtername", "==", "'johnson_b'", ":", "cfgitem", "=", "Conf", ".", "johnson_b_file", "elif", "filtername", "==", "'johnson_v'", ":", "cfgitem", "=", "Conf", ".", "johnson_v_file", "elif", "filtername", "==", "'johnson_r'", ":", "cfgitem", "=", "Conf", ".", "johnson_r_file", "elif", "filtername", "==", "'johnson_i'", ":", "cfgitem", "=", "Conf", ".", "johnson_i_file", "elif", "filtername", "==", "'johnson_j'", ":", "cfgitem", "=", "Conf", ".", "johnson_j_file", "elif", "filtername", "==", "'johnson_k'", ":", "cfgitem", "=", "Conf", ".", "johnson_k_file", "else", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Filter name {0} is invalid.'", ".", "format", "(", "filtername", ")", ")", "filename", "=", "cfgitem", "(", ")", "if", "'flux_unit'", "not", "in", "kwargs", ":", "kwargs", "[", "'flux_unit'", "]", "=", "cls", ".", "_internal_flux_unit", "if", "(", "(", "filename", ".", "endswith", "(", "'fits'", ")", "or", "filename", ".", "endswith", "(", "'fit'", ")", ")", "and", "'flux_col'", "not", "in", "kwargs", ")", ":", "kwargs", "[", "'flux_col'", "]", "=", "'THROUGHPUT'", "header", ",", "wavelengths", ",", "throughput", "=", "specio", ".", "read_remote_spec", "(", "filename", ",", "*", "*", "kwargs", ")", "header", "[", "'filename'", "]", "=", "filename", "header", "[", "'descrip'", "]", "=", "cfgitem", ".", "description", "meta", "=", "{", "'header'", ":", "header", ",", "'expr'", ":", "filtername", "}", "return", "cls", "(", "Empirical1D", ",", "points", "=", "wavelengths", ",", "lookup_table", "=", "throughput", ",", "meta", "=", "meta", ")" ]
Load :ref:`pre-defined filter bandpass <synphot-predefined-filter>`. Parameters ---------- filtername : str Filter name. Choose from 'bessel_j', 'bessel_h', 'bessel_k', 'cousins_r', 'cousins_i', 'johnson_u', 'johnson_b', 'johnson_v', 'johnson_r', 'johnson_i', 'johnson_j', or 'johnson_k'. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_remote_spec`. Returns ------- bp : `SpectralElement` Empirical bandpass. Raises ------ synphot.exceptions.SynphotError Invalid filter name.
[ "Load", ":", "ref", ":", "pre", "-", "defined", "filter", "bandpass", "<synphot", "-", "predefined", "-", "filter", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/spectrum.py#L1839-L1909
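Usage sketch for from_filter; the pre-packaged throughput table is fetched from the configured remote location, so network access (or a local cache) is assumed.

from synphot import SpectralElement

bp_v = SpectralElement.from_filter('johnson_v')
print(bp_v.meta['expr'])    # 'johnson_v'
print(bp_v.tpeak())         # peak throughput of the loaded table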
Julius2342/pyvlx
pyvlx/activate_scene.py
ActivateScene.handle_frame
async def handle_frame(self, frame): """Handle incoming API frame, return True if this was the expected frame.""" if isinstance(frame, FrameActivateSceneConfirmation) and frame.session_id == self.session_id: if frame.status == ActivateSceneConfirmationStatus.ACCEPTED: self.success = True return not self.wait_for_completion if isinstance(frame, FrameCommandRemainingTimeNotification) and frame.session_id == self.session_id: # Ignoring FrameCommandRemainingTimeNotification return False if isinstance(frame, FrameCommandRunStatusNotification) and frame.session_id == self.session_id: # At the moment I don't reall understand what the FrameCommandRunStatusNotification is good for. # Ignoring these packets for now return False if isinstance(frame, FrameSessionFinishedNotification) and frame.session_id == self.session_id: return True return False
python
async def handle_frame(self, frame): """Handle incoming API frame, return True if this was the expected frame.""" if isinstance(frame, FrameActivateSceneConfirmation) and frame.session_id == self.session_id: if frame.status == ActivateSceneConfirmationStatus.ACCEPTED: self.success = True return not self.wait_for_completion if isinstance(frame, FrameCommandRemainingTimeNotification) and frame.session_id == self.session_id: # Ignoring FrameCommandRemainingTimeNotification return False if isinstance(frame, FrameCommandRunStatusNotification) and frame.session_id == self.session_id: # At the moment I don't reall understand what the FrameCommandRunStatusNotification is good for. # Ignoring these packets for now return False if isinstance(frame, FrameSessionFinishedNotification) and frame.session_id == self.session_id: return True return False
[ "async", "def", "handle_frame", "(", "self", ",", "frame", ")", ":", "if", "isinstance", "(", "frame", ",", "FrameActivateSceneConfirmation", ")", "and", "frame", ".", "session_id", "==", "self", ".", "session_id", ":", "if", "frame", ".", "status", "==", "ActivateSceneConfirmationStatus", ".", "ACCEPTED", ":", "self", ".", "success", "=", "True", "return", "not", "self", ".", "wait_for_completion", "if", "isinstance", "(", "frame", ",", "FrameCommandRemainingTimeNotification", ")", "and", "frame", ".", "session_id", "==", "self", ".", "session_id", ":", "# Ignoring FrameCommandRemainingTimeNotification", "return", "False", "if", "isinstance", "(", "frame", ",", "FrameCommandRunStatusNotification", ")", "and", "frame", ".", "session_id", "==", "self", ".", "session_id", ":", "# At the moment I don't reall understand what the FrameCommandRunStatusNotification is good for.", "# Ignoring these packets for now", "return", "False", "if", "isinstance", "(", "frame", ",", "FrameSessionFinishedNotification", ")", "and", "frame", ".", "session_id", "==", "self", ".", "session_id", ":", "return", "True", "return", "False" ]
Handle incoming API frame, return True if this was the expected frame.
[ "Handle", "incoming", "API", "frame", "return", "True", "if", "this", "was", "the", "expected", "frame", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/activate_scene.py#L21-L36
Julius2342/pyvlx
pyvlx/activate_scene.py
ActivateScene.request_frame
def request_frame(self): """Construct initiating frame.""" self.session_id = get_new_session_id() return FrameActivateSceneRequest(scene_id=self.scene_id, session_id=self.session_id)
python
def request_frame(self): """Construct initiating frame.""" self.session_id = get_new_session_id() return FrameActivateSceneRequest(scene_id=self.scene_id, session_id=self.session_id)
[ "def", "request_frame", "(", "self", ")", ":", "self", ".", "session_id", "=", "get_new_session_id", "(", ")", "return", "FrameActivateSceneRequest", "(", "scene_id", "=", "self", ".", "scene_id", ",", "session_id", "=", "self", ".", "session_id", ")" ]
Construct initiating frame.
[ "Construct", "initiating", "frame", "." ]
train
https://github.com/Julius2342/pyvlx/blob/ee78e1324bcb1be5b8d1a9d05ab5496b72eae848/pyvlx/activate_scene.py#L38-L41
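A simplified, hypothetical sketch of how the two ActivateScene methods above (request_frame and handle_frame) are meant to cooperate; send_frame and receive_frame stand in for whatever connection layer is actually used and are not part of pyvlx.

# Hypothetical helper: send the initiating frame, then feed received frames
# back into handle_frame until it reports the expected terminating frame.
async def run_activate_scene(scene, send_frame, receive_frame):
    await send_frame(scene.request_frame())    # FrameActivateSceneRequest with a new session id
    while not await scene.handle_frame(await receive_frame()):
        pass                                   # intermediate notifications are ignored
    return scene.success                       # set by FrameActivateSceneConfirmation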
spacetelescope/synphot_refactor
synphot/observation.py
Observation._init_bins
def _init_bins(self, binset): """Calculated binned wavelength centers, edges, and flux. By contrast, the native waveset and flux should be considered samples of a continuous function. Thus, it makes sense to interpolate ``self.waveset`` and ``self(self.waveset)``, but not `binset` and `binflux`. """ if binset is None: if self.bandpass.waveset is not None: self._binset = self.bandpass.waveset elif self.spectrum.waveset is not None: self._binset = self.spectrum.waveset log.info('Bandpass waveset is undefined; ' 'Using source spectrum waveset instead.') else: raise exceptions.UndefinedBinset( 'Both source spectrum and bandpass have undefined ' 'waveset; Provide binset manually.') else: self._binset = self._validate_wavelengths(binset) # binset must be in ascending order for calcbinflux() # to work properly. if self._binset[0] > self._binset[-1]: self._binset = self._binset[::-1] self._bin_edges = binning.calculate_bin_edges(self._binset) # Merge bin edges and centers in with the natural waveset spwave = utils.merge_wavelengths( self._bin_edges.value, self._binset.value) if self.waveset is not None: spwave = utils.merge_wavelengths(spwave, self.waveset.value) # Throw out invalid wavelengths after merging. spwave = spwave[spwave > 0] # Compute indices associated to each endpoint. indices = np.searchsorted(spwave, self._bin_edges.value) i_beg = indices[:-1] i_end = indices[1:] # Prepare integration variables. flux = self(spwave) avflux = (flux.value[1:] + flux.value[:-1]) * 0.5 deltaw = spwave[1:] - spwave[:-1] # Sum over each bin. binflux, intwave = binning.calcbinflux( self._binset.size, i_beg, i_end, avflux, deltaw) self._binflux = binflux * flux.unit
python
def _init_bins(self, binset): """Calculated binned wavelength centers, edges, and flux. By contrast, the native waveset and flux should be considered samples of a continuous function. Thus, it makes sense to interpolate ``self.waveset`` and ``self(self.waveset)``, but not `binset` and `binflux`. """ if binset is None: if self.bandpass.waveset is not None: self._binset = self.bandpass.waveset elif self.spectrum.waveset is not None: self._binset = self.spectrum.waveset log.info('Bandpass waveset is undefined; ' 'Using source spectrum waveset instead.') else: raise exceptions.UndefinedBinset( 'Both source spectrum and bandpass have undefined ' 'waveset; Provide binset manually.') else: self._binset = self._validate_wavelengths(binset) # binset must be in ascending order for calcbinflux() # to work properly. if self._binset[0] > self._binset[-1]: self._binset = self._binset[::-1] self._bin_edges = binning.calculate_bin_edges(self._binset) # Merge bin edges and centers in with the natural waveset spwave = utils.merge_wavelengths( self._bin_edges.value, self._binset.value) if self.waveset is not None: spwave = utils.merge_wavelengths(spwave, self.waveset.value) # Throw out invalid wavelengths after merging. spwave = spwave[spwave > 0] # Compute indices associated to each endpoint. indices = np.searchsorted(spwave, self._bin_edges.value) i_beg = indices[:-1] i_end = indices[1:] # Prepare integration variables. flux = self(spwave) avflux = (flux.value[1:] + flux.value[:-1]) * 0.5 deltaw = spwave[1:] - spwave[:-1] # Sum over each bin. binflux, intwave = binning.calcbinflux( self._binset.size, i_beg, i_end, avflux, deltaw) self._binflux = binflux * flux.unit
[ "def", "_init_bins", "(", "self", ",", "binset", ")", ":", "if", "binset", "is", "None", ":", "if", "self", ".", "bandpass", ".", "waveset", "is", "not", "None", ":", "self", ".", "_binset", "=", "self", ".", "bandpass", ".", "waveset", "elif", "self", ".", "spectrum", ".", "waveset", "is", "not", "None", ":", "self", ".", "_binset", "=", "self", ".", "spectrum", ".", "waveset", "log", ".", "info", "(", "'Bandpass waveset is undefined; '", "'Using source spectrum waveset instead.'", ")", "else", ":", "raise", "exceptions", ".", "UndefinedBinset", "(", "'Both source spectrum and bandpass have undefined '", "'waveset; Provide binset manually.'", ")", "else", ":", "self", ".", "_binset", "=", "self", ".", "_validate_wavelengths", "(", "binset", ")", "# binset must be in ascending order for calcbinflux()", "# to work properly.", "if", "self", ".", "_binset", "[", "0", "]", ">", "self", ".", "_binset", "[", "-", "1", "]", ":", "self", ".", "_binset", "=", "self", ".", "_binset", "[", ":", ":", "-", "1", "]", "self", ".", "_bin_edges", "=", "binning", ".", "calculate_bin_edges", "(", "self", ".", "_binset", ")", "# Merge bin edges and centers in with the natural waveset", "spwave", "=", "utils", ".", "merge_wavelengths", "(", "self", ".", "_bin_edges", ".", "value", ",", "self", ".", "_binset", ".", "value", ")", "if", "self", ".", "waveset", "is", "not", "None", ":", "spwave", "=", "utils", ".", "merge_wavelengths", "(", "spwave", ",", "self", ".", "waveset", ".", "value", ")", "# Throw out invalid wavelengths after merging.", "spwave", "=", "spwave", "[", "spwave", ">", "0", "]", "# Compute indices associated to each endpoint.", "indices", "=", "np", ".", "searchsorted", "(", "spwave", ",", "self", ".", "_bin_edges", ".", "value", ")", "i_beg", "=", "indices", "[", ":", "-", "1", "]", "i_end", "=", "indices", "[", "1", ":", "]", "# Prepare integration variables.", "flux", "=", "self", "(", "spwave", ")", "avflux", "=", "(", "flux", ".", "value", "[", "1", ":", "]", "+", "flux", ".", "value", "[", ":", "-", "1", "]", ")", "*", "0.5", "deltaw", "=", "spwave", "[", "1", ":", "]", "-", "spwave", "[", ":", "-", "1", "]", "# Sum over each bin.", "binflux", ",", "intwave", "=", "binning", ".", "calcbinflux", "(", "self", ".", "_binset", ".", "size", ",", "i_beg", ",", "i_end", ",", "avflux", ",", "deltaw", ")", "self", ".", "_binflux", "=", "binflux", "*", "flux", ".", "unit" ]
Calculated binned wavelength centers, edges, and flux. By contrast, the native waveset and flux should be considered samples of a continuous function. Thus, it makes sense to interpolate ``self.waveset`` and ``self(self.waveset)``, but not `binset` and `binflux`.
[ "Calculated", "binned", "wavelength", "centers", "edges", "and", "flux", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L142-L196
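Sketch of how binset and binflux come out of _init_bins when an Observation is built; the Observation(spectrum, bandpass, binset=...) call follows the public synphot pattern and the toy data are made up.

import numpy as np
from synphot import Observation, SourceSpectrum, SpectralElement
from synphot.models import Empirical1D

wave = np.arange(3000., 8000., 10.)
sp = SourceSpectrum(Empirical1D, points=wave, lookup_table=np.ones_like(wave))
bp = SpectralElement(Empirical1D, points=wave,
                     lookup_table=np.where(abs(wave - 5500.) < 500., 0.8, 0.0))
obs = Observation(sp, bp, binset=np.arange(5000., 6000., 10.))   # explicit bin centers
print(obs.binset.size, obs.binflux.unit)   # 100 bins, flux in PHOTLAM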
spacetelescope/synphot_refactor
synphot/observation.py
Observation.sample_binned
def sample_binned(self, wavelengths=None, flux_unit=None, **kwargs): """Sample binned observation without interpolation. To sample unbinned data, use ``__call__``. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `binset` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit. If not given, internal unit is used. kwargs : dict Keywords acceptable by :func:`~synphot.units.convert_flux`. Returns ------- flux : `~astropy.units.quantity.Quantity` Binned flux in given unit. Raises ------ synphot.exceptions.InterpolationNotAllowed Interpolation of binned data is not allowed. """ x = self._validate_binned_wavelengths(wavelengths) i = np.searchsorted(self.binset, x) if not np.allclose(self.binset[i].value, x.value): raise exceptions.InterpolationNotAllowed( 'Some or all wavelength values are not in binset.') y = self.binflux[i] if flux_unit is None: flux = y else: flux = units.convert_flux(x, y, flux_unit, **kwargs) return flux
python
def sample_binned(self, wavelengths=None, flux_unit=None, **kwargs): """Sample binned observation without interpolation. To sample unbinned data, use ``__call__``. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `binset` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit. If not given, internal unit is used. kwargs : dict Keywords acceptable by :func:`~synphot.units.convert_flux`. Returns ------- flux : `~astropy.units.quantity.Quantity` Binned flux in given unit. Raises ------ synphot.exceptions.InterpolationNotAllowed Interpolation of binned data is not allowed. """ x = self._validate_binned_wavelengths(wavelengths) i = np.searchsorted(self.binset, x) if not np.allclose(self.binset[i].value, x.value): raise exceptions.InterpolationNotAllowed( 'Some or all wavelength values are not in binset.') y = self.binflux[i] if flux_unit is None: flux = y else: flux = units.convert_flux(x, y, flux_unit, **kwargs) return flux
[ "def", "sample_binned", "(", "self", ",", "wavelengths", "=", "None", ",", "flux_unit", "=", "None", ",", "*", "*", "kwargs", ")", ":", "x", "=", "self", ".", "_validate_binned_wavelengths", "(", "wavelengths", ")", "i", "=", "np", ".", "searchsorted", "(", "self", ".", "binset", ",", "x", ")", "if", "not", "np", ".", "allclose", "(", "self", ".", "binset", "[", "i", "]", ".", "value", ",", "x", ".", "value", ")", ":", "raise", "exceptions", ".", "InterpolationNotAllowed", "(", "'Some or all wavelength values are not in binset.'", ")", "y", "=", "self", ".", "binflux", "[", "i", "]", "if", "flux_unit", "is", "None", ":", "flux", "=", "y", "else", ":", "flux", "=", "units", ".", "convert_flux", "(", "x", ",", "y", ",", "flux_unit", ",", "*", "*", "kwargs", ")", "return", "flux" ]
Sample binned observation without interpolation. To sample unbinned data, use ``__call__``. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, `binset` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit. If not given, internal unit is used. kwargs : dict Keywords acceptable by :func:`~synphot.units.convert_flux`. Returns ------- flux : `~astropy.units.quantity.Quantity` Binned flux in given unit. Raises ------ synphot.exceptions.InterpolationNotAllowed Interpolation of binned data is not allowed.
[ "Sample", "binned", "observation", "without", "interpolation", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L241-L283
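Taken together with the __call__ sampling inherited from the spectrum classes, sample_binned is easiest to see in a tiny end-to-end example. The sketch below assumes the public synphot API (SourceSpectrum, SpectralElement, Observation and the ConstFlux1D/Box1D models); those class names come from the wider package rather than from the records shown here, so treat the constructor details as illustrative.

import numpy as np
from synphot import Observation, SourceSpectrum, SpectralElement, units
from synphot.models import Box1D, ConstFlux1D

# Flat 1-PHOTLAM source through a 100-Angstrom boxcar bandpass.
sp = SourceSpectrum(ConstFlux1D, amplitude=1 * units.PHOTLAM)
bp = SpectralElement(Box1D, amplitude=1, x_0=5000, width=100)
obs = Observation(sp, bp, binset=np.arange(4950, 5051))   # 1-Angstrom bins

# Binned sampling only works at exact binset values (no interpolation) ...
flux_binned = obs.sample_binned(wavelengths=[4980, 5000, 5020])

# ... whereas unbinned sampling interpolates the underlying model freely.
flux_native = obs([4980.5, 5000.3])

# Asking sample_binned for a wavelength that is not a bin center raises
# synphot.exceptions.InterpolationNotAllowed.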
spacetelescope/synphot_refactor
synphot/observation.py
Observation._get_binned_arrays
def _get_binned_arrays(self, wavelengths, flux_unit, area=None, vegaspec=None): """Get binned observation in user units.""" x = self._validate_binned_wavelengths(wavelengths) y = self.sample_binned(wavelengths=x, flux_unit=flux_unit, area=area, vegaspec=vegaspec) if isinstance(wavelengths, u.Quantity): w = x.to(wavelengths.unit, u.spectral()) else: w = x return w, y
python
def _get_binned_arrays(self, wavelengths, flux_unit, area=None, vegaspec=None): """Get binned observation in user units.""" x = self._validate_binned_wavelengths(wavelengths) y = self.sample_binned(wavelengths=x, flux_unit=flux_unit, area=area, vegaspec=vegaspec) if isinstance(wavelengths, u.Quantity): w = x.to(wavelengths.unit, u.spectral()) else: w = x return w, y
[ "def", "_get_binned_arrays", "(", "self", ",", "wavelengths", ",", "flux_unit", ",", "area", "=", "None", ",", "vegaspec", "=", "None", ")", ":", "x", "=", "self", ".", "_validate_binned_wavelengths", "(", "wavelengths", ")", "y", "=", "self", ".", "sample_binned", "(", "wavelengths", "=", "x", ",", "flux_unit", "=", "flux_unit", ",", "area", "=", "area", ",", "vegaspec", "=", "vegaspec", ")", "if", "isinstance", "(", "wavelengths", ",", "u", ".", "Quantity", ")", ":", "w", "=", "x", ".", "to", "(", "wavelengths", ".", "unit", ",", "u", ".", "spectral", "(", ")", ")", "else", ":", "w", "=", "x", "return", "w", ",", "y" ]
Get binned observation in user units.
[ "Get", "binned", "observation", "in", "user", "units", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L285-L297
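_get_binned_arrays itself is just a thin wrapper around sample_binned; the only extra step is echoing the wavelengths back in whatever unit the caller supplied, via astropy's spectral equivalencies. A standalone illustration of that conversion step (plain astropy, no synphot objects needed):

import astropy.units as u

x_internal = [5000.0, 6000.0] * u.AA              # internal Angstrom values
x_user = x_internal.to(u.micron, u.spectral())    # handed back in the caller's unit
print(x_user)                                     # [0.5 0.6] micron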
spacetelescope/synphot_refactor
synphot/observation.py
Observation.binned_waverange
def binned_waverange(self, cenwave, npix, **kwargs): """Calculate the wavelength range covered by the given number of pixels centered on the given central wavelengths of `binset`. Parameters ---------- cenwave : float or `~astropy.units.quantity.Quantity` Desired central wavelength. If not a Quantity, assumed to be in Angstrom. npix : int Desired number of pixels, centered on ``cenwave``. kwargs : dict Keywords accepted by :func:`synphot.binning.wave_range`. Returns ------- waverange : `~astropy.units.quantity.Quantity` Lower and upper limits of the wavelength range, in the unit of ``cenwave``. """ # Calculation is done in the unit of cenwave. if not isinstance(cenwave, u.Quantity): cenwave = cenwave * self._internal_wave_unit bin_wave = units.validate_quantity( self.binset, cenwave.unit, equivalencies=u.spectral()) return binning.wave_range( bin_wave.value, cenwave.value, npix, **kwargs) * cenwave.unit
python
def binned_waverange(self, cenwave, npix, **kwargs): """Calculate the wavelength range covered by the given number of pixels centered on the given central wavelengths of `binset`. Parameters ---------- cenwave : float or `~astropy.units.quantity.Quantity` Desired central wavelength. If not a Quantity, assumed to be in Angstrom. npix : int Desired number of pixels, centered on ``cenwave``. kwargs : dict Keywords accepted by :func:`synphot.binning.wave_range`. Returns ------- waverange : `~astropy.units.quantity.Quantity` Lower and upper limits of the wavelength range, in the unit of ``cenwave``. """ # Calculation is done in the unit of cenwave. if not isinstance(cenwave, u.Quantity): cenwave = cenwave * self._internal_wave_unit bin_wave = units.validate_quantity( self.binset, cenwave.unit, equivalencies=u.spectral()) return binning.wave_range( bin_wave.value, cenwave.value, npix, **kwargs) * cenwave.unit
[ "def", "binned_waverange", "(", "self", ",", "cenwave", ",", "npix", ",", "*", "*", "kwargs", ")", ":", "# Calculation is done in the unit of cenwave.", "if", "not", "isinstance", "(", "cenwave", ",", "u", ".", "Quantity", ")", ":", "cenwave", "=", "cenwave", "*", "self", ".", "_internal_wave_unit", "bin_wave", "=", "units", ".", "validate_quantity", "(", "self", ".", "binset", ",", "cenwave", ".", "unit", ",", "equivalencies", "=", "u", ".", "spectral", "(", ")", ")", "return", "binning", ".", "wave_range", "(", "bin_wave", ".", "value", ",", "cenwave", ".", "value", ",", "npix", ",", "*", "*", "kwargs", ")", "*", "cenwave", ".", "unit" ]
Calculate the wavelength range covered by the given number of pixels centered on the given central wavelengths of `binset`. Parameters ---------- cenwave : float or `~astropy.units.quantity.Quantity` Desired central wavelength. If not a Quantity, assumed to be in Angstrom. npix : int Desired number of pixels, centered on ``cenwave``. kwargs : dict Keywords accepted by :func:`synphot.binning.wave_range`. Returns ------- waverange : `~astropy.units.quantity.Quantity` Lower and upper limits of the wavelength range, in the unit of ``cenwave``.
[ "Calculate", "the", "wavelength", "range", "covered", "by", "the", "given", "number", "of", "pixels", "centered", "on", "the", "given", "central", "wavelengths", "of", "binset", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L299-L331
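binned_waverange delegates to synphot.binning.wave_range after putting everything in the unit of cenwave. A rough standalone sketch against that module-level helper, with the default rounding behaviour assumed:

import numpy as np
from synphot import binning

bins = np.arange(4950.0, 5051.0)               # 1-Angstrom bin centers
w1, w2 = binning.wave_range(bins, 5000.0, 11)  # 11 pixels centered on 5000
# Expect roughly (4994.5, 5005.5): eleven unit-width pixels, centers 4995..5005.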
spacetelescope/synphot_refactor
synphot/observation.py
Observation.binned_pixelrange
def binned_pixelrange(self, waverange, **kwargs): """Calculate the number of pixels within the given wavelength range and `binset`. Parameters ---------- waverange : tuple of float or `~astropy.units.quantity.Quantity` Lower and upper limits of the desired wavelength range. If not a Quantity, assumed to be in Angstrom. kwargs : dict Keywords accepted by :func:`synphot.binning.pixel_range`. Returns ------- npix : int Number of pixels. """ x = units.validate_quantity( waverange, self._internal_wave_unit, equivalencies=u.spectral()) return binning.pixel_range(self.binset.value, x.value, **kwargs)
python
def binned_pixelrange(self, waverange, **kwargs): """Calculate the number of pixels within the given wavelength range and `binset`. Parameters ---------- waverange : tuple of float or `~astropy.units.quantity.Quantity` Lower and upper limits of the desired wavelength range. If not a Quantity, assumed to be in Angstrom. kwargs : dict Keywords accepted by :func:`synphot.binning.pixel_range`. Returns ------- npix : int Number of pixels. """ x = units.validate_quantity( waverange, self._internal_wave_unit, equivalencies=u.spectral()) return binning.pixel_range(self.binset.value, x.value, **kwargs)
[ "def", "binned_pixelrange", "(", "self", ",", "waverange", ",", "*", "*", "kwargs", ")", ":", "x", "=", "units", ".", "validate_quantity", "(", "waverange", ",", "self", ".", "_internal_wave_unit", ",", "equivalencies", "=", "u", ".", "spectral", "(", ")", ")", "return", "binning", ".", "pixel_range", "(", "self", ".", "binset", ".", "value", ",", "x", ".", "value", ",", "*", "*", "kwargs", ")" ]
Calculate the number of pixels within the given wavelength range and `binset`. Parameters ---------- waverange : tuple of float or `~astropy.units.quantity.Quantity` Lower and upper limits of the desired wavelength range. If not a Quantity, assumed to be in Angstrom. kwargs : dict Keywords accepted by :func:`synphot.binning.pixel_range`. Returns ------- npix : int Number of pixels.
[ "Calculate", "the", "number", "of", "pixels", "within", "the", "given", "wavelength", "range", "and", "binset", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L333-L354
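binned_pixelrange is the inverse operation, built on synphot.binning.pixel_range. Continuing the same assumptions (unit-width bins, default rounding mode):

import numpy as np
from synphot import binning

bins = np.arange(4950.0, 5051.0)                    # 1-Angstrom bin centers
npix = binning.pixel_range(bins, (4994.5, 5005.5))  # inverse of wave_range above
# Expect 11: the range spans the pixels centered on 4995 through 5005.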
spacetelescope/synphot_refactor
synphot/observation.py
Observation.effective_wavelength
def effective_wavelength(self, binned=True, wavelengths=None, mode='efflerg'): """Calculate :ref:`effective wavelength <synphot-formula-effwave>`. Parameters ---------- binned : bool Sample data in native wavelengths if `False`. Else, sample binned data (default). wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` or `binset` is used, depending on ``binned``. mode : {'efflerg', 'efflphot'} Flux is first converted to the unit below before calculation: * 'efflerg' - FLAM * 'efflphot' - PHOTLAM (deprecated) Returns ------- eff_lam : `~astropy.units.quantity.Quantity` Observation effective wavelength. Raises ------ synphot.exceptions.SynphotError Invalid mode. """ mode = mode.lower() if mode == 'efflerg': flux_unit = units.FLAM elif mode == 'efflphot': warnings.warn( 'Usage of EFFLPHOT is deprecated.', AstropyDeprecationWarning) flux_unit = units.PHOTLAM else: raise exceptions.SynphotError( 'mode must be "efflerg" or "efflphot"') if binned: x = self._validate_binned_wavelengths(wavelengths).value y = self.sample_binned(wavelengths=x, flux_unit=flux_unit).value else: x = self._validate_wavelengths(wavelengths).value y = units.convert_flux(x, self(x), flux_unit).value num = np.trapz(y * x ** 2, x=x) den = np.trapz(y * x, x=x) if den == 0.0: # pragma: no cover eff_lam = 0.0 else: eff_lam = abs(num / den) return eff_lam * self._internal_wave_unit
python
def effective_wavelength(self, binned=True, wavelengths=None, mode='efflerg'): """Calculate :ref:`effective wavelength <synphot-formula-effwave>`. Parameters ---------- binned : bool Sample data in native wavelengths if `False`. Else, sample binned data (default). wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` or `binset` is used, depending on ``binned``. mode : {'efflerg', 'efflphot'} Flux is first converted to the unit below before calculation: * 'efflerg' - FLAM * 'efflphot' - PHOTLAM (deprecated) Returns ------- eff_lam : `~astropy.units.quantity.Quantity` Observation effective wavelength. Raises ------ synphot.exceptions.SynphotError Invalid mode. """ mode = mode.lower() if mode == 'efflerg': flux_unit = units.FLAM elif mode == 'efflphot': warnings.warn( 'Usage of EFFLPHOT is deprecated.', AstropyDeprecationWarning) flux_unit = units.PHOTLAM else: raise exceptions.SynphotError( 'mode must be "efflerg" or "efflphot"') if binned: x = self._validate_binned_wavelengths(wavelengths).value y = self.sample_binned(wavelengths=x, flux_unit=flux_unit).value else: x = self._validate_wavelengths(wavelengths).value y = units.convert_flux(x, self(x), flux_unit).value num = np.trapz(y * x ** 2, x=x) den = np.trapz(y * x, x=x) if den == 0.0: # pragma: no cover eff_lam = 0.0 else: eff_lam = abs(num / den) return eff_lam * self._internal_wave_unit
[ "def", "effective_wavelength", "(", "self", ",", "binned", "=", "True", ",", "wavelengths", "=", "None", ",", "mode", "=", "'efflerg'", ")", ":", "mode", "=", "mode", ".", "lower", "(", ")", "if", "mode", "==", "'efflerg'", ":", "flux_unit", "=", "units", ".", "FLAM", "elif", "mode", "==", "'efflphot'", ":", "warnings", ".", "warn", "(", "'Usage of EFFLPHOT is deprecated.'", ",", "AstropyDeprecationWarning", ")", "flux_unit", "=", "units", ".", "PHOTLAM", "else", ":", "raise", "exceptions", ".", "SynphotError", "(", "'mode must be \"efflerg\" or \"efflphot\"'", ")", "if", "binned", ":", "x", "=", "self", ".", "_validate_binned_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", ".", "sample_binned", "(", "wavelengths", "=", "x", ",", "flux_unit", "=", "flux_unit", ")", ".", "value", "else", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "units", ".", "convert_flux", "(", "x", ",", "self", "(", "x", ")", ",", "flux_unit", ")", ".", "value", "num", "=", "np", ".", "trapz", "(", "y", "*", "x", "**", "2", ",", "x", "=", "x", ")", "den", "=", "np", ".", "trapz", "(", "y", "*", "x", ",", "x", "=", "x", ")", "if", "den", "==", "0.0", ":", "# pragma: no cover", "eff_lam", "=", "0.0", "else", ":", "eff_lam", "=", "abs", "(", "num", "/", "den", ")", "return", "eff_lam", "*", "self", ".", "_internal_wave_unit" ]
Calculate :ref:`effective wavelength <synphot-formula-effwave>`. Parameters ---------- binned : bool Sample data in native wavelengths if `False`. Else, sample binned data (default). wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` or `binset` is used, depending on ``binned``. mode : {'efflerg', 'efflphot'} Flux is first converted to the unit below before calculation: * 'efflerg' - FLAM * 'efflphot' - PHOTLAM (deprecated) Returns ------- eff_lam : `~astropy.units.quantity.Quantity` Observation effective wavelength. Raises ------ synphot.exceptions.SynphotError Invalid mode.
[ "Calculate", ":", "ref", ":", "effective", "wavelength", "<synphot", "-", "formula", "-", "effwave", ">", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L356-L415
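The quantity computed in effective_wavelength is the mean wavelength weighted by f(lambda) * lambda, i.e. integral(f * lambda**2 dlambda) / integral(f * lambda dlambda), evaluated with np.trapz exactly as in the tokens above. A plain-numpy check for a flat source across a symmetric band:

import numpy as np

wave = np.linspace(4950.0, 5050.0, 101)
flux = np.ones_like(wave)                    # flat flux in FLAM-like units

num = np.trapz(flux * wave**2, x=wave)
den = np.trapz(flux * wave, x=wave)
eff_lam = num / den
print(eff_lam)   # ~5000.17: slightly red of band center, since longer wavelengths weigh more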
spacetelescope/synphot_refactor
synphot/observation.py
Observation.effstim
def effstim(self, flux_unit=None, wavelengths=None, area=None, vegaspec=None): """Calculate :ref:`effective stimulus <synphot-formula-effstim>` for given flux unit. Parameters ---------- flux_unit : str or `~astropy.units.core.Unit` or `None` The unit of effective stimulus. COUNT gives result in count/s (see :meth:`countrate` for more options). If not given, internal unit is used. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. This must be given if ``self.waveset`` is undefined for the underlying spectrum model(s). If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. area, vegaspec See :func:`~synphot.units.convert_flux`. Returns ------- eff_stim : `~astropy.units.quantity.Quantity` Observation effective stimulus based on given flux unit. """ if flux_unit is None: flux_unit = self._internal_flux_unit flux_unit = units.validate_unit(flux_unit) flux_unit_name = flux_unit.to_string() # Special handling of COUNT/OBMAG. # This is special case of countrate calculations. if flux_unit == u.count or flux_unit_name == units.OBMAG.to_string(): val = self.countrate(area, binned=False, wavelengths=wavelengths) if flux_unit.decompose() == u.mag: eff_stim = (-2.5 * np.log10(val.value)) * flux_unit else: eff_stim = val return eff_stim # Special handling of VEGAMAG. # This is basically effstim(self)/effstim(Vega) if flux_unit_name == units.VEGAMAG.to_string(): num = self.integrate(wavelengths=wavelengths) den = (vegaspec * self.bandpass).integrate() utils.validate_totalflux(num) utils.validate_totalflux(den) return (2.5 * (math.log10(den.value) - math.log10(num.value))) * units.VEGAMAG # Sample the bandpass x_band = self.bandpass._validate_wavelengths(wavelengths).value y_band = self.bandpass(x_band).value # Sample the observation in FLAM inwave = self._validate_wavelengths(wavelengths).value influx = units.convert_flux(inwave, self(inwave), units.FLAM).value # Integrate num = abs(np.trapz(inwave * influx, x=inwave)) den = abs(np.trapz(x_band * y_band, x=x_band)) utils.validate_totalflux(num) utils.validate_totalflux(den) val = (num / den) * units.FLAM # Integration should always be done in FLAM and then # converted to desired units as follows. if flux_unit.physical_type == 'spectral flux density wav': if flux_unit == u.STmag: eff_stim = val.to(flux_unit) else: # FLAM eff_stim = val elif flux_unit.physical_type in ( 'spectral flux density', 'photon flux density', 'photon flux density wav'): w_pivot = self.bandpass.pivot() eff_stim = units.convert_flux(w_pivot, val, flux_unit) else: raise exceptions.SynphotError( 'Flux unit {0} is invalid'.format(flux_unit)) return eff_stim
python
def effstim(self, flux_unit=None, wavelengths=None, area=None, vegaspec=None): """Calculate :ref:`effective stimulus <synphot-formula-effstim>` for given flux unit. Parameters ---------- flux_unit : str or `~astropy.units.core.Unit` or `None` The unit of effective stimulus. COUNT gives result in count/s (see :meth:`countrate` for more options). If not given, internal unit is used. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. This must be given if ``self.waveset`` is undefined for the underlying spectrum model(s). If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. area, vegaspec See :func:`~synphot.units.convert_flux`. Returns ------- eff_stim : `~astropy.units.quantity.Quantity` Observation effective stimulus based on given flux unit. """ if flux_unit is None: flux_unit = self._internal_flux_unit flux_unit = units.validate_unit(flux_unit) flux_unit_name = flux_unit.to_string() # Special handling of COUNT/OBMAG. # This is special case of countrate calculations. if flux_unit == u.count or flux_unit_name == units.OBMAG.to_string(): val = self.countrate(area, binned=False, wavelengths=wavelengths) if flux_unit.decompose() == u.mag: eff_stim = (-2.5 * np.log10(val.value)) * flux_unit else: eff_stim = val return eff_stim # Special handling of VEGAMAG. # This is basically effstim(self)/effstim(Vega) if flux_unit_name == units.VEGAMAG.to_string(): num = self.integrate(wavelengths=wavelengths) den = (vegaspec * self.bandpass).integrate() utils.validate_totalflux(num) utils.validate_totalflux(den) return (2.5 * (math.log10(den.value) - math.log10(num.value))) * units.VEGAMAG # Sample the bandpass x_band = self.bandpass._validate_wavelengths(wavelengths).value y_band = self.bandpass(x_band).value # Sample the observation in FLAM inwave = self._validate_wavelengths(wavelengths).value influx = units.convert_flux(inwave, self(inwave), units.FLAM).value # Integrate num = abs(np.trapz(inwave * influx, x=inwave)) den = abs(np.trapz(x_band * y_band, x=x_band)) utils.validate_totalflux(num) utils.validate_totalflux(den) val = (num / den) * units.FLAM # Integration should always be done in FLAM and then # converted to desired units as follows. if flux_unit.physical_type == 'spectral flux density wav': if flux_unit == u.STmag: eff_stim = val.to(flux_unit) else: # FLAM eff_stim = val elif flux_unit.physical_type in ( 'spectral flux density', 'photon flux density', 'photon flux density wav'): w_pivot = self.bandpass.pivot() eff_stim = units.convert_flux(w_pivot, val, flux_unit) else: raise exceptions.SynphotError( 'Flux unit {0} is invalid'.format(flux_unit)) return eff_stim
[ "def", "effstim", "(", "self", ",", "flux_unit", "=", "None", ",", "wavelengths", "=", "None", ",", "area", "=", "None", ",", "vegaspec", "=", "None", ")", ":", "if", "flux_unit", "is", "None", ":", "flux_unit", "=", "self", ".", "_internal_flux_unit", "flux_unit", "=", "units", ".", "validate_unit", "(", "flux_unit", ")", "flux_unit_name", "=", "flux_unit", ".", "to_string", "(", ")", "# Special handling of COUNT/OBMAG.", "# This is special case of countrate calculations.", "if", "flux_unit", "==", "u", ".", "count", "or", "flux_unit_name", "==", "units", ".", "OBMAG", ".", "to_string", "(", ")", ":", "val", "=", "self", ".", "countrate", "(", "area", ",", "binned", "=", "False", ",", "wavelengths", "=", "wavelengths", ")", "if", "flux_unit", ".", "decompose", "(", ")", "==", "u", ".", "mag", ":", "eff_stim", "=", "(", "-", "2.5", "*", "np", ".", "log10", "(", "val", ".", "value", ")", ")", "*", "flux_unit", "else", ":", "eff_stim", "=", "val", "return", "eff_stim", "# Special handling of VEGAMAG.", "# This is basically effstim(self)/effstim(Vega)", "if", "flux_unit_name", "==", "units", ".", "VEGAMAG", ".", "to_string", "(", ")", ":", "num", "=", "self", ".", "integrate", "(", "wavelengths", "=", "wavelengths", ")", "den", "=", "(", "vegaspec", "*", "self", ".", "bandpass", ")", ".", "integrate", "(", ")", "utils", ".", "validate_totalflux", "(", "num", ")", "utils", ".", "validate_totalflux", "(", "den", ")", "return", "(", "2.5", "*", "(", "math", ".", "log10", "(", "den", ".", "value", ")", "-", "math", ".", "log10", "(", "num", ".", "value", ")", ")", ")", "*", "units", ".", "VEGAMAG", "# Sample the bandpass", "x_band", "=", "self", ".", "bandpass", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y_band", "=", "self", ".", "bandpass", "(", "x_band", ")", ".", "value", "# Sample the observation in FLAM", "inwave", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "influx", "=", "units", ".", "convert_flux", "(", "inwave", ",", "self", "(", "inwave", ")", ",", "units", ".", "FLAM", ")", ".", "value", "# Integrate", "num", "=", "abs", "(", "np", ".", "trapz", "(", "inwave", "*", "influx", ",", "x", "=", "inwave", ")", ")", "den", "=", "abs", "(", "np", ".", "trapz", "(", "x_band", "*", "y_band", ",", "x", "=", "x_band", ")", ")", "utils", ".", "validate_totalflux", "(", "num", ")", "utils", ".", "validate_totalflux", "(", "den", ")", "val", "=", "(", "num", "/", "den", ")", "*", "units", ".", "FLAM", "# Integration should always be done in FLAM and then", "# converted to desired units as follows.", "if", "flux_unit", ".", "physical_type", "==", "'spectral flux density wav'", ":", "if", "flux_unit", "==", "u", ".", "STmag", ":", "eff_stim", "=", "val", ".", "to", "(", "flux_unit", ")", "else", ":", "# FLAM", "eff_stim", "=", "val", "elif", "flux_unit", ".", "physical_type", "in", "(", "'spectral flux density'", ",", "'photon flux density'", ",", "'photon flux density wav'", ")", ":", "w_pivot", "=", "self", ".", "bandpass", ".", "pivot", "(", ")", "eff_stim", "=", "units", ".", "convert_flux", "(", "w_pivot", ",", "val", ",", "flux_unit", ")", "else", ":", "raise", "exceptions", ".", "SynphotError", "(", "'Flux unit {0} is invalid'", ".", "format", "(", "flux_unit", ")", ")", "return", "eff_stim" ]
Calculate :ref:`effective stimulus <synphot-formula-effstim>` for given flux unit. Parameters ---------- flux_unit : str or `~astropy.units.core.Unit` or `None` The unit of effective stimulus. COUNT gives result in count/s (see :meth:`countrate` for more options). If not given, internal unit is used. wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. This must be given if ``self.waveset`` is undefined for the underlying spectrum model(s). If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. area, vegaspec See :func:`~synphot.units.convert_flux`. Returns ------- eff_stim : `~astropy.units.quantity.Quantity` Observation effective stimulus based on given flux unit.
[ "Calculate", ":", "ref", ":", "effective", "stimulus", "<synphot", "-", "formula", "-", "effstim", ">", "for", "given", "flux", "unit", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L418-L505
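For the default (non-magnitude, non-count) path, effstim reduces to a band-averaged flux in FLAM, integral(lambda * f dlambda) / integral(lambda * T dlambda), which is then converted to the requested unit at the pivot wavelength. A plain-numpy sketch of just the integral ratio:

import numpy as np

wave = np.linspace(4950.0, 5050.0, 101)
thru = np.ones_like(wave)                # boxcar throughput
flux = 2e-15 * thru                      # observed (source x bandpass) flux in FLAM

effstim_flam = np.trapz(wave * flux, x=wave) / np.trapz(wave * thru, x=wave)
print(effstim_flam)                      # 2e-15: a flat source comes back unchanged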
spacetelescope/synphot_refactor
synphot/observation.py
Observation.countrate
def countrate(self, area, binned=True, wavelengths=None, waverange=None, force=False): """Calculate :ref:`effective stimulus <synphot-formula-effstim>` in count/s. Parameters ---------- area : float or `~astropy.units.quantity.Quantity` Area that flux covers. If not a Quantity, assumed to be in :math:`cm^{2}`. binned : bool Sample data in native wavelengths if `False`. Else, sample binned data (default). wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. This must be given if ``self.waveset`` is undefined for the underlying spectrum model(s). If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` or `binset` is used, depending on ``binned``. waverange : tuple of float, Quantity, or `None` Lower and upper limits of the desired wavelength range. If not a Quantity, assumed to be in Angstrom. If `None`, the full range is used. force : bool If a wavelength range is given, partial overlap raises an exception when this is `False` (default). Otherwise, it returns calculation for the overlapping region. Disjoint wavelength range raises an exception regardless. Returns ------- count_rate : `~astropy.units.quantity.Quantity` Observation effective stimulus in count/s. Raises ------ synphot.exceptions.DisjointError Wavelength range does not overlap with observation. synphot.exceptions.PartialOverlap Wavelength range only partially overlaps with observation. synphot.exceptions.SynphotError Calculation failed. """ # Sample the observation if binned: x = self._validate_binned_wavelengths(wavelengths).value y = self.sample_binned(wavelengths=x, flux_unit=u.count, area=area).value else: x = self._validate_wavelengths(wavelengths).value y = units.convert_flux(x, self(x), u.count, area=area).value # Use entire wavelength range if waverange is None: influx = y # Use given wavelength range else: w = units.validate_quantity(waverange, self._internal_wave_unit, equivalencies=u.spectral()).value stat = utils.overlap_status(w, x) w1 = w.min() w2 = w.max() if stat == 'none': raise exceptions.DisjointError( 'Observation and wavelength range are disjoint.') elif 'partial' in stat: if force: warnings.warn( 'Count rate calculated only for wavelengths in the ' 'overlap between observation and given range.', AstropyUserWarning) w1 = max(w1, x.min()) w2 = min(w2, x.max()) else: raise exceptions.PartialOverlap( 'Observation and wavelength range do not fully ' 'overlap. You may use force=True to force this ' 'calculation anyway.') elif stat != 'full': # pragma: no cover raise exceptions.SynphotError( 'Overlap result of {0} is unexpected'.format(stat)) if binned: if wavelengths is None: bin_edges = self.bin_edges.value else: bin_edges = binning.calculate_bin_edges(x).value i1 = np.searchsorted(bin_edges, w1) - 1 i2 = np.searchsorted(bin_edges, w2) influx = y[i1:i2] else: mask = ((x >= w1) & (x <= w2)) influx = y[mask] val = math.fsum(influx) utils.validate_totalflux(val) return val * (u.count / u.s)
python
def countrate(self, area, binned=True, wavelengths=None, waverange=None, force=False): """Calculate :ref:`effective stimulus <synphot-formula-effstim>` in count/s. Parameters ---------- area : float or `~astropy.units.quantity.Quantity` Area that flux covers. If not a Quantity, assumed to be in :math:`cm^{2}`. binned : bool Sample data in native wavelengths if `False`. Else, sample binned data (default). wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. This must be given if ``self.waveset`` is undefined for the underlying spectrum model(s). If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` or `binset` is used, depending on ``binned``. waverange : tuple of float, Quantity, or `None` Lower and upper limits of the desired wavelength range. If not a Quantity, assumed to be in Angstrom. If `None`, the full range is used. force : bool If a wavelength range is given, partial overlap raises an exception when this is `False` (default). Otherwise, it returns calculation for the overlapping region. Disjoint wavelength range raises an exception regardless. Returns ------- count_rate : `~astropy.units.quantity.Quantity` Observation effective stimulus in count/s. Raises ------ synphot.exceptions.DisjointError Wavelength range does not overlap with observation. synphot.exceptions.PartialOverlap Wavelength range only partially overlaps with observation. synphot.exceptions.SynphotError Calculation failed. """ # Sample the observation if binned: x = self._validate_binned_wavelengths(wavelengths).value y = self.sample_binned(wavelengths=x, flux_unit=u.count, area=area).value else: x = self._validate_wavelengths(wavelengths).value y = units.convert_flux(x, self(x), u.count, area=area).value # Use entire wavelength range if waverange is None: influx = y # Use given wavelength range else: w = units.validate_quantity(waverange, self._internal_wave_unit, equivalencies=u.spectral()).value stat = utils.overlap_status(w, x) w1 = w.min() w2 = w.max() if stat == 'none': raise exceptions.DisjointError( 'Observation and wavelength range are disjoint.') elif 'partial' in stat: if force: warnings.warn( 'Count rate calculated only for wavelengths in the ' 'overlap between observation and given range.', AstropyUserWarning) w1 = max(w1, x.min()) w2 = min(w2, x.max()) else: raise exceptions.PartialOverlap( 'Observation and wavelength range do not fully ' 'overlap. You may use force=True to force this ' 'calculation anyway.') elif stat != 'full': # pragma: no cover raise exceptions.SynphotError( 'Overlap result of {0} is unexpected'.format(stat)) if binned: if wavelengths is None: bin_edges = self.bin_edges.value else: bin_edges = binning.calculate_bin_edges(x).value i1 = np.searchsorted(bin_edges, w1) - 1 i2 = np.searchsorted(bin_edges, w2) influx = y[i1:i2] else: mask = ((x >= w1) & (x <= w2)) influx = y[mask] val = math.fsum(influx) utils.validate_totalflux(val) return val * (u.count / u.s)
[ "def", "countrate", "(", "self", ",", "area", ",", "binned", "=", "True", ",", "wavelengths", "=", "None", ",", "waverange", "=", "None", ",", "force", "=", "False", ")", ":", "# Sample the observation", "if", "binned", ":", "x", "=", "self", ".", "_validate_binned_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "self", ".", "sample_binned", "(", "wavelengths", "=", "x", ",", "flux_unit", "=", "u", ".", "count", ",", "area", "=", "area", ")", ".", "value", "else", ":", "x", "=", "self", ".", "_validate_wavelengths", "(", "wavelengths", ")", ".", "value", "y", "=", "units", ".", "convert_flux", "(", "x", ",", "self", "(", "x", ")", ",", "u", ".", "count", ",", "area", "=", "area", ")", ".", "value", "# Use entire wavelength range", "if", "waverange", "is", "None", ":", "influx", "=", "y", "# Use given wavelength range", "else", ":", "w", "=", "units", ".", "validate_quantity", "(", "waverange", ",", "self", ".", "_internal_wave_unit", ",", "equivalencies", "=", "u", ".", "spectral", "(", ")", ")", ".", "value", "stat", "=", "utils", ".", "overlap_status", "(", "w", ",", "x", ")", "w1", "=", "w", ".", "min", "(", ")", "w2", "=", "w", ".", "max", "(", ")", "if", "stat", "==", "'none'", ":", "raise", "exceptions", ".", "DisjointError", "(", "'Observation and wavelength range are disjoint.'", ")", "elif", "'partial'", "in", "stat", ":", "if", "force", ":", "warnings", ".", "warn", "(", "'Count rate calculated only for wavelengths in the '", "'overlap between observation and given range.'", ",", "AstropyUserWarning", ")", "w1", "=", "max", "(", "w1", ",", "x", ".", "min", "(", ")", ")", "w2", "=", "min", "(", "w2", ",", "x", ".", "max", "(", ")", ")", "else", ":", "raise", "exceptions", ".", "PartialOverlap", "(", "'Observation and wavelength range do not fully '", "'overlap. You may use force=True to force this '", "'calculation anyway.'", ")", "elif", "stat", "!=", "'full'", ":", "# pragma: no cover", "raise", "exceptions", ".", "SynphotError", "(", "'Overlap result of {0} is unexpected'", ".", "format", "(", "stat", ")", ")", "if", "binned", ":", "if", "wavelengths", "is", "None", ":", "bin_edges", "=", "self", ".", "bin_edges", ".", "value", "else", ":", "bin_edges", "=", "binning", ".", "calculate_bin_edges", "(", "x", ")", ".", "value", "i1", "=", "np", ".", "searchsorted", "(", "bin_edges", ",", "w1", ")", "-", "1", "i2", "=", "np", ".", "searchsorted", "(", "bin_edges", ",", "w2", ")", "influx", "=", "y", "[", "i1", ":", "i2", "]", "else", ":", "mask", "=", "(", "(", "x", ">=", "w1", ")", "&", "(", "x", "<=", "w2", ")", ")", "influx", "=", "y", "[", "mask", "]", "val", "=", "math", ".", "fsum", "(", "influx", ")", "utils", ".", "validate_totalflux", "(", "val", ")", "return", "val", "*", "(", "u", ".", "count", "/", "u", ".", "s", ")" ]
Calculate :ref:`effective stimulus <synphot-formula-effstim>` in count/s. Parameters ---------- area : float or `~astropy.units.quantity.Quantity` Area that flux covers. If not a Quantity, assumed to be in :math:`cm^{2}`. binned : bool Sample data in native wavelengths if `False`. Else, sample binned data (default). wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for sampling. This must be given if ``self.waveset`` is undefined for the underlying spectrum model(s). If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` or `binset` is used, depending on ``binned``. waverange : tuple of float, Quantity, or `None` Lower and upper limits of the desired wavelength range. If not a Quantity, assumed to be in Angstrom. If `None`, the full range is used. force : bool If a wavelength range is given, partial overlap raises an exception when this is `False` (default). Otherwise, it returns calculation for the overlapping region. Disjoint wavelength range raises an exception regardless. Returns ------- count_rate : `~astropy.units.quantity.Quantity` Observation effective stimulus in count/s. Raises ------ synphot.exceptions.DisjointError Wavelength range does not overlap with observation. synphot.exceptions.PartialOverlap Wavelength range only partially overlaps with observation. synphot.exceptions.SynphotError Calculation failed.
[ "Calculate", ":", "ref", ":", "effective", "stimulus", "<synphot", "-", "formula", "-", "effstim", ">", "in", "count", "/", "s", "." ]
train
https://github.com/spacetelescope/synphot_refactor/blob/9c064f3cff0c41dd8acadc0f67c6350931275b9f/synphot/observation.py#L507-L614
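countrate is the quantity most pipelines actually want. The sketch below rebuilds the illustrative Observation from the sample_binned example and uses an HST-like collecting area; as before, the synphot class names and constructor arguments are assumptions drawn from the wider package rather than from these records.

import numpy as np
import astropy.units as u
from synphot import Observation, SourceSpectrum, SpectralElement, units
from synphot.models import Box1D, ConstFlux1D

sp = SourceSpectrum(ConstFlux1D, amplitude=1 * units.PHOTLAM)
bp = SpectralElement(Box1D, amplitude=1, x_0=5000, width=100)
obs = Observation(sp, bp, binset=np.arange(4900, 5101))

area = 45238.93 * u.cm**2                      # roughly the HST primary collecting area

rate_full = obs.countrate(area)                                  # whole bandpass, binned data
rate_part = obs.countrate(area, waverange=[4980, 5020] * u.AA)   # sub-range, full overlap

# A waverange that only partially overlaps the observation raises PartialOverlap
# unless force=True is passed, in which case only the overlapping bins are summed.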