Dataset columns (name: type, observed range):

repository_name: string, lengths 7 to 55
func_path_in_repository: string, lengths 4 to 223
func_name: string, lengths 1 to 134
whole_func_string: string, lengths 75 to 104k
language: string, 1 distinct value
func_code_string: string, lengths 75 to 104k
func_code_tokens: list, lengths 19 to 28.4k
func_documentation_string: string, lengths 1 to 46.9k
func_documentation_tokens: list, lengths 1 to 1.97k
split_name: string, 1 distinct value
func_code_url: string, lengths 87 to 315
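Each record pairs a function's source code with its docstring and tokenized forms. As a rough sketch of how the split could be inspected, assuming the Hugging Face `datasets` library and a placeholder dataset identifier (the real path is not given here):

```python
from datasets import load_dataset

# Placeholder identifier; substitute the actual dataset path.
ds = load_dataset("org/python-functions-with-docstrings", split="train")

row = ds[0]
print(row["repository_name"])        # e.g. "paydunya/paydunya-python"
print(row["func_name"])              # e.g. "Invoice.calculate_total_amt"
print(row["func_documentation_string"])
print(len(row["func_code_tokens"]))  # number of code tokens for this function
print(row["func_code_url"])          # permalink to the source lines on GitHub
```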
paydunya/paydunya-python
paydunya/invoice.py
Invoice.calculate_total_amt
def calculate_total_amt(self, items={}):
    """Returns the total amount/cost of items in the current invoice"""
    _items = items.items() or self.items.items()
    return sum(float(x[1].total_price) for x in _items)
python
def calculate_total_amt(self, items={}):
    """Returns the total amount/cost of items in the current invoice"""
    _items = items.items() or self.items.items()
    return sum(float(x[1].total_price) for x in _items)
[ "def", "calculate_total_amt", "(", "self", ",", "items", "=", "{", "}", ")", ":", "_items", "=", "items", ".", "items", "(", ")", "or", "self", ".", "items", ".", "items", "(", ")", "return", "sum", "(", "float", "(", "x", "[", "1", "]", ".", "total_price", ")", "for", "x", "in", "_items", ")" ]
Returns the total amount/cost of items in the current invoice
[ "Returns", "the", "total", "amount", "/", "cost", "of", "items", "in", "the", "current", "invoice" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L118-L121
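Continuing the sketch above, the token columns are plain token splits of the code (with the docstring removed) and of the docstring, so they can be re-joined for a quick look at what a model would actually consume; this is an illustrative check, not part of the dataset tooling:

```python
# Re-join the token columns of the first record for inspection.
code_tokens = " ".join(row["func_code_tokens"])
doc_tokens = " ".join(row["func_documentation_tokens"])

print(code_tokens[:80])  # tokenized function body, docstring stripped
print(doc_tokens[:80])   # tokenized docstring
```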
paydunya/paydunya-python
paydunya/invoice.py
Invoice.__encode_items
def __encode_items(self, items):
    """Encodes the InvoiceItems into a JSON serializable format

    items = [('item_1',InvoiceItem(name='VIP Ticket', quantity=2, unit_price='3500',
        total_price='7000', description='VIP Tickets for party')),...]
    """
    xs = [item._asdict() for (_key, item) in items.items()]
    return list(map(lambda x: dict(zip(x.keys(), x.values())), xs))
python
def __encode_items(self, items):
    """Encodes the InvoiceItems into a JSON serializable format

    items = [('item_1',InvoiceItem(name='VIP Ticket', quantity=2, unit_price='3500',
        total_price='7000', description='VIP Tickets for party')),...]
    """
    xs = [item._asdict() for (_key, item) in items.items()]
    return list(map(lambda x: dict(zip(x.keys(), x.values())), xs))
[ "def", "__encode_items", "(", "self", ",", "items", ")", ":", "xs", "=", "[", "item", ".", "_asdict", "(", ")", "for", "(", "_key", ",", "item", ")", "in", "items", ".", "items", "(", ")", "]", "return", "list", "(", "map", "(", "lambda", "x", ":", "dict", "(", "zip", "(", "x", ".", "keys", "(", ")", ",", "x", ".", "values", "(", ")", ")", ")", ",", "xs", ")", ")" ]
Encodes the InvoiceItems into a JSON serializable format items = [('item_1',InvoiceItem(name='VIP Ticket', quantity=2, unit_price='3500', total_price='7000', description='VIP Tickets for party')),...]
[ "Encodes", "the", "InvoiceItems", "into", "a", "JSON", "serializable", "format" ]
train
https://github.com/paydunya/paydunya-python/blob/bb55791e2814788aec74162d9d78970815f37c30/paydunya/invoice.py#L123-L131
cons3rt/pycons3rt
pycons3rt/deployment.py
main
def main(): """Sample usage for this python module This main method simply illustrates sample usage for this python module. :return: None """ log = logging.getLogger(mod_logger + '.main') parser = argparse.ArgumentParser(description='cons3rt deployment CLI') parser.add_argument('command', help='Command for the deployment CLI') parser.add_argument('--network', help='Name of the network') parser.add_argument('--name', help='Name of a deployment property to get') args = parser.parse_args() valid_commands = ['ip', 'device', 'prop'] valid_commands_str = ','.join(valid_commands) # Get the command command = args.command.strip().lower() # Ensure the command is valid if command not in valid_commands: print('Invalid command found [{c}]\n'.format(c=command) + valid_commands_str) return 1 if command == 'ip': if not args.network: print('Missed arg: --network, for the name of the network') elif command == 'device': if not args.network: print('Missed arg: --network, for the name of the network') return 1 d = Deployment() print(d.get_device_for_network_linux(network_name=args.network)) elif command == 'prop': if not args.name: print('Missed arg: --name, for the name of the property to retrieve') return 1 d = Deployment() print(d.get_value(property_name=args.name))
python
def main(): """Sample usage for this python module This main method simply illustrates sample usage for this python module. :return: None """ log = logging.getLogger(mod_logger + '.main') parser = argparse.ArgumentParser(description='cons3rt deployment CLI') parser.add_argument('command', help='Command for the deployment CLI') parser.add_argument('--network', help='Name of the network') parser.add_argument('--name', help='Name of a deployment property to get') args = parser.parse_args() valid_commands = ['ip', 'device', 'prop'] valid_commands_str = ','.join(valid_commands) # Get the command command = args.command.strip().lower() # Ensure the command is valid if command not in valid_commands: print('Invalid command found [{c}]\n'.format(c=command) + valid_commands_str) return 1 if command == 'ip': if not args.network: print('Missed arg: --network, for the name of the network') elif command == 'device': if not args.network: print('Missed arg: --network, for the name of the network') return 1 d = Deployment() print(d.get_device_for_network_linux(network_name=args.network)) elif command == 'prop': if not args.name: print('Missed arg: --name, for the name of the property to retrieve') return 1 d = Deployment() print(d.get_value(property_name=args.name))
[ "def", "main", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.main'", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'cons3rt deployment CLI'", ")", "parser", ".", "add_argument", "(", "'command'", ",", "help", "=", "'Command for the deployment CLI'", ")", "parser", ".", "add_argument", "(", "'--network'", ",", "help", "=", "'Name of the network'", ")", "parser", ".", "add_argument", "(", "'--name'", ",", "help", "=", "'Name of a deployment property to get'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "valid_commands", "=", "[", "'ip'", ",", "'device'", ",", "'prop'", "]", "valid_commands_str", "=", "','", ".", "join", "(", "valid_commands", ")", "# Get the command", "command", "=", "args", ".", "command", ".", "strip", "(", ")", ".", "lower", "(", ")", "# Ensure the command is valid", "if", "command", "not", "in", "valid_commands", ":", "print", "(", "'Invalid command found [{c}]\\n'", ".", "format", "(", "c", "=", "command", ")", "+", "valid_commands_str", ")", "return", "1", "if", "command", "==", "'ip'", ":", "if", "not", "args", ".", "network", ":", "print", "(", "'Missed arg: --network, for the name of the network'", ")", "elif", "command", "==", "'device'", ":", "if", "not", "args", ".", "network", ":", "print", "(", "'Missed arg: --network, for the name of the network'", ")", "return", "1", "d", "=", "Deployment", "(", ")", "print", "(", "d", ".", "get_device_for_network_linux", "(", "network_name", "=", "args", ".", "network", ")", ")", "elif", "command", "==", "'prop'", ":", "if", "not", "args", ".", "name", ":", "print", "(", "'Missed arg: --name, for the name of the property to retrieve'", ")", "return", "1", "d", "=", "Deployment", "(", ")", "print", "(", "d", ".", "get_value", "(", "property_name", "=", "args", ".", "name", ")", ")" ]
Sample usage for this python module This main method simply illustrates sample usage for this python module. :return: None
[ "Sample", "usage", "for", "this", "python", "module" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L704-L744
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_deployment_home
def set_deployment_home(self): """Sets self.deployment_home This method finds and sets deployment home, primarily based on the DEPLOYMENT_HOME environment variable. If not set, this method will attempt to determine deployment home. :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_home') try: self.deployment_home = os.environ['DEPLOYMENT_HOME'] except KeyError: log.warn('DEPLOYMENT_HOME environment variable is not set, attempting to set it...') else: log.info('Found DEPLOYMENT_HOME environment variable set to: {d}'.format(d=self.deployment_home)) return if self.cons3rt_agent_run_dir is None: msg = 'This is not Windows nor Linux, cannot determine DEPLOYMENT_HOME' log.error(msg) raise DeploymentError(msg) # Ensure the run directory can be found if not os.path.isdir(self.cons3rt_agent_run_dir): msg = 'Could not find the cons3rt run directory, DEPLOYMENT_HOME cannot be set' log.error(msg) raise DeploymentError(msg) run_dir_contents = os.listdir(self.cons3rt_agent_run_dir) results = [] for item in run_dir_contents: if 'Deployment' in item: results.append(item) if len(results) != 1: msg = 'Could not find deployment home in the cons3rt run directory, deployment home cannot be set' log.error(msg) raise DeploymentError(msg) # Ensure the Deployment Home is a directory candidate_deployment_home = os.path.join(self.cons3rt_agent_run_dir, results[0]) if not os.path.isdir(candidate_deployment_home): msg = 'The candidate deployment home is not a valid directory: {d}'.format(d=candidate_deployment_home) log.error(msg) raise DeploymentError(msg) # Ensure the deployment properties file can be found self.deployment_home = candidate_deployment_home os.environ['DEPLOYMENT_HOME'] = self.deployment_home log.info('Set DEPLOYMENT_HOME in the environment to: {d}'.format(d=self.deployment_home))
python
def set_deployment_home(self): """Sets self.deployment_home This method finds and sets deployment home, primarily based on the DEPLOYMENT_HOME environment variable. If not set, this method will attempt to determine deployment home. :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_home') try: self.deployment_home = os.environ['DEPLOYMENT_HOME'] except KeyError: log.warn('DEPLOYMENT_HOME environment variable is not set, attempting to set it...') else: log.info('Found DEPLOYMENT_HOME environment variable set to: {d}'.format(d=self.deployment_home)) return if self.cons3rt_agent_run_dir is None: msg = 'This is not Windows nor Linux, cannot determine DEPLOYMENT_HOME' log.error(msg) raise DeploymentError(msg) # Ensure the run directory can be found if not os.path.isdir(self.cons3rt_agent_run_dir): msg = 'Could not find the cons3rt run directory, DEPLOYMENT_HOME cannot be set' log.error(msg) raise DeploymentError(msg) run_dir_contents = os.listdir(self.cons3rt_agent_run_dir) results = [] for item in run_dir_contents: if 'Deployment' in item: results.append(item) if len(results) != 1: msg = 'Could not find deployment home in the cons3rt run directory, deployment home cannot be set' log.error(msg) raise DeploymentError(msg) # Ensure the Deployment Home is a directory candidate_deployment_home = os.path.join(self.cons3rt_agent_run_dir, results[0]) if not os.path.isdir(candidate_deployment_home): msg = 'The candidate deployment home is not a valid directory: {d}'.format(d=candidate_deployment_home) log.error(msg) raise DeploymentError(msg) # Ensure the deployment properties file can be found self.deployment_home = candidate_deployment_home os.environ['DEPLOYMENT_HOME'] = self.deployment_home log.info('Set DEPLOYMENT_HOME in the environment to: {d}'.format(d=self.deployment_home))
[ "def", "set_deployment_home", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_deployment_home'", ")", "try", ":", "self", ".", "deployment_home", "=", "os", ".", "environ", "[", "'DEPLOYMENT_HOME'", "]", "except", "KeyError", ":", "log", ".", "warn", "(", "'DEPLOYMENT_HOME environment variable is not set, attempting to set it...'", ")", "else", ":", "log", ".", "info", "(", "'Found DEPLOYMENT_HOME environment variable set to: {d}'", ".", "format", "(", "d", "=", "self", ".", "deployment_home", ")", ")", "return", "if", "self", ".", "cons3rt_agent_run_dir", "is", "None", ":", "msg", "=", "'This is not Windows nor Linux, cannot determine DEPLOYMENT_HOME'", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", "(", "msg", ")", "# Ensure the run directory can be found", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "cons3rt_agent_run_dir", ")", ":", "msg", "=", "'Could not find the cons3rt run directory, DEPLOYMENT_HOME cannot be set'", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", "(", "msg", ")", "run_dir_contents", "=", "os", ".", "listdir", "(", "self", ".", "cons3rt_agent_run_dir", ")", "results", "=", "[", "]", "for", "item", "in", "run_dir_contents", ":", "if", "'Deployment'", "in", "item", ":", "results", ".", "append", "(", "item", ")", "if", "len", "(", "results", ")", "!=", "1", ":", "msg", "=", "'Could not find deployment home in the cons3rt run directory, deployment home cannot be set'", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", "(", "msg", ")", "# Ensure the Deployment Home is a directory", "candidate_deployment_home", "=", "os", ".", "path", ".", "join", "(", "self", ".", "cons3rt_agent_run_dir", ",", "results", "[", "0", "]", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "candidate_deployment_home", ")", ":", "msg", "=", "'The candidate deployment home is not a valid directory: {d}'", ".", "format", "(", "d", "=", "candidate_deployment_home", ")", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", "(", "msg", ")", "# Ensure the deployment properties file can be found", "self", ".", "deployment_home", "=", "candidate_deployment_home", "os", ".", "environ", "[", "'DEPLOYMENT_HOME'", "]", "=", "self", ".", "deployment_home", "log", ".", "info", "(", "'Set DEPLOYMENT_HOME in the environment to: {d}'", ".", "format", "(", "d", "=", "self", ".", "deployment_home", ")", ")" ]
Sets self.deployment_home This method finds and sets deployment home, primarily based on the DEPLOYMENT_HOME environment variable. If not set, this method will attempt to determine deployment home. :return: None
[ "Sets", "self", ".", "deployment_home" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L115-L164
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.read_deployment_properties
def read_deployment_properties(self): """Reads the deployment properties file This method reads the deployment properties file into the "properties" dictionary object. :return: None :raises: DeploymentError """ log = logging.getLogger(self.cls_logger + '.read_deployment_properties') # Ensure deployment properties file exists self.properties_file = os.path.join(self.deployment_home, 'deployment.properties') if not os.path.isfile(self.properties_file): msg = 'Deployment properties file not found: {f}'.format(f=self.properties_file) log.error(msg) raise DeploymentError(msg) log.info('Found deployment properties file: {f}'.format(f=self.properties_file)) log.info('Reading deployment properties...') try: f = open(self.properties_file) except (IOError, OSError): _, ex, trace = sys.exc_info() msg = 'Could not open file {file} to read property: {prop}'.format( file=self.properties_file, prop=property) log.error(msg) raise DeploymentError, msg, trace for line in f: log.debug('Processing deployment properties file line: {l}'.format(l=line)) if not isinstance(line, basestring): log.debug('Skipping line that is not a string: {l}'.format(l=line)) continue elif line.startswith('#'): log.debug('Skipping line that is a comment: {l}'.format(l=line)) continue elif '=' in line: split_line = line.strip().split('=', 1) if len(split_line) == 2: prop_name = split_line[0].strip() prop_value = split_line[1].strip() if prop_name is None or not prop_name or prop_value is None or not prop_value: log.debug('Property name <{n}> or value <v> is none or blank, not including it'.format( n=prop_name, v=prop_value)) else: log.debug('Adding property {n} with value {v}...'.format(n=prop_name, v=prop_value)) self.properties[prop_name] = prop_value else: log.debug('Skipping line that did not split into 2 part on an equal sign...') log.info('Successfully read in deployment properties')
python
def read_deployment_properties(self): """Reads the deployment properties file This method reads the deployment properties file into the "properties" dictionary object. :return: None :raises: DeploymentError """ log = logging.getLogger(self.cls_logger + '.read_deployment_properties') # Ensure deployment properties file exists self.properties_file = os.path.join(self.deployment_home, 'deployment.properties') if not os.path.isfile(self.properties_file): msg = 'Deployment properties file not found: {f}'.format(f=self.properties_file) log.error(msg) raise DeploymentError(msg) log.info('Found deployment properties file: {f}'.format(f=self.properties_file)) log.info('Reading deployment properties...') try: f = open(self.properties_file) except (IOError, OSError): _, ex, trace = sys.exc_info() msg = 'Could not open file {file} to read property: {prop}'.format( file=self.properties_file, prop=property) log.error(msg) raise DeploymentError, msg, trace for line in f: log.debug('Processing deployment properties file line: {l}'.format(l=line)) if not isinstance(line, basestring): log.debug('Skipping line that is not a string: {l}'.format(l=line)) continue elif line.startswith('#'): log.debug('Skipping line that is a comment: {l}'.format(l=line)) continue elif '=' in line: split_line = line.strip().split('=', 1) if len(split_line) == 2: prop_name = split_line[0].strip() prop_value = split_line[1].strip() if prop_name is None or not prop_name or prop_value is None or not prop_value: log.debug('Property name <{n}> or value <v> is none or blank, not including it'.format( n=prop_name, v=prop_value)) else: log.debug('Adding property {n} with value {v}...'.format(n=prop_name, v=prop_value)) self.properties[prop_name] = prop_value else: log.debug('Skipping line that did not split into 2 part on an equal sign...') log.info('Successfully read in deployment properties')
[ "def", "read_deployment_properties", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.read_deployment_properties'", ")", "# Ensure deployment properties file exists", "self", ".", "properties_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "deployment_home", ",", "'deployment.properties'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "properties_file", ")", ":", "msg", "=", "'Deployment properties file not found: {f}'", ".", "format", "(", "f", "=", "self", ".", "properties_file", ")", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", "(", "msg", ")", "log", ".", "info", "(", "'Found deployment properties file: {f}'", ".", "format", "(", "f", "=", "self", ".", "properties_file", ")", ")", "log", ".", "info", "(", "'Reading deployment properties...'", ")", "try", ":", "f", "=", "open", "(", "self", ".", "properties_file", ")", "except", "(", "IOError", ",", "OSError", ")", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Could not open file {file} to read property: {prop}'", ".", "format", "(", "file", "=", "self", ".", "properties_file", ",", "prop", "=", "property", ")", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", ",", "msg", ",", "trace", "for", "line", "in", "f", ":", "log", ".", "debug", "(", "'Processing deployment properties file line: {l}'", ".", "format", "(", "l", "=", "line", ")", ")", "if", "not", "isinstance", "(", "line", ",", "basestring", ")", ":", "log", ".", "debug", "(", "'Skipping line that is not a string: {l}'", ".", "format", "(", "l", "=", "line", ")", ")", "continue", "elif", "line", ".", "startswith", "(", "'#'", ")", ":", "log", ".", "debug", "(", "'Skipping line that is a comment: {l}'", ".", "format", "(", "l", "=", "line", ")", ")", "continue", "elif", "'='", "in", "line", ":", "split_line", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'='", ",", "1", ")", "if", "len", "(", "split_line", ")", "==", "2", ":", "prop_name", "=", "split_line", "[", "0", "]", ".", "strip", "(", ")", "prop_value", "=", "split_line", "[", "1", "]", ".", "strip", "(", ")", "if", "prop_name", "is", "None", "or", "not", "prop_name", "or", "prop_value", "is", "None", "or", "not", "prop_value", ":", "log", ".", "debug", "(", "'Property name <{n}> or value <v> is none or blank, not including it'", ".", "format", "(", "n", "=", "prop_name", ",", "v", "=", "prop_value", ")", ")", "else", ":", "log", ".", "debug", "(", "'Adding property {n} with value {v}...'", ".", "format", "(", "n", "=", "prop_name", ",", "v", "=", "prop_value", ")", ")", "self", ".", "properties", "[", "prop_name", "]", "=", "prop_value", "else", ":", "log", ".", "debug", "(", "'Skipping line that did not split into 2 part on an equal sign...'", ")", "log", ".", "info", "(", "'Successfully read in deployment properties'", ")" ]
Reads the deployment properties file This method reads the deployment properties file into the "properties" dictionary object. :return: None :raises: DeploymentError
[ "Reads", "the", "deployment", "properties", "file" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L166-L217
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.get_property
def get_property(self, regex):
    """Gets the name of a specific property

    This public method is passed a regular expression and returns the
    matching property name. If either the property is not found or if the
    passed string matches more than one property, this function will
    return None.

    :param regex: Regular expression to search on
    :return: (str) Property name matching the passed regex or None.
    """
    log = logging.getLogger(self.cls_logger + '.get_property')
    if not isinstance(regex, basestring):
        log.error('regex arg is not a string found type: {t}'.format(t=regex.__class__.__name__))
        return None
    log.debug('Looking up property based on regex: {r}'.format(r=regex))
    prop_list_matched = []
    for prop_name in self.properties.keys():
        match = re.search(regex, prop_name)
        if match:
            prop_list_matched.append(prop_name)
    if len(prop_list_matched) == 1:
        log.debug('Found matching property: {p}'.format(p=prop_list_matched[0]))
        return prop_list_matched[0]
    elif len(prop_list_matched) > 1:
        log.debug('Passed regex {r} matched more than 1 property, checking for an exact match...'.format(r=regex))
        for matched_prop in prop_list_matched:
            if matched_prop == regex:
                log.debug('Found an exact match: {p}'.format(p=matched_prop))
                return matched_prop
        log.debug('Exact match not found for regex {r}, returning None'.format(r=regex))
        return None
    else:
        log.debug('Passed regex did not match any deployment properties: {r}'.format(r=regex))
        return None
python
def get_property(self, regex):
    """Gets the name of a specific property

    This public method is passed a regular expression and returns the
    matching property name. If either the property is not found or if the
    passed string matches more than one property, this function will
    return None.

    :param regex: Regular expression to search on
    :return: (str) Property name matching the passed regex or None.
    """
    log = logging.getLogger(self.cls_logger + '.get_property')
    if not isinstance(regex, basestring):
        log.error('regex arg is not a string found type: {t}'.format(t=regex.__class__.__name__))
        return None
    log.debug('Looking up property based on regex: {r}'.format(r=regex))
    prop_list_matched = []
    for prop_name in self.properties.keys():
        match = re.search(regex, prop_name)
        if match:
            prop_list_matched.append(prop_name)
    if len(prop_list_matched) == 1:
        log.debug('Found matching property: {p}'.format(p=prop_list_matched[0]))
        return prop_list_matched[0]
    elif len(prop_list_matched) > 1:
        log.debug('Passed regex {r} matched more than 1 property, checking for an exact match...'.format(r=regex))
        for matched_prop in prop_list_matched:
            if matched_prop == regex:
                log.debug('Found an exact match: {p}'.format(p=matched_prop))
                return matched_prop
        log.debug('Exact match not found for regex {r}, returning None'.format(r=regex))
        return None
    else:
        log.debug('Passed regex did not match any deployment properties: {r}'.format(r=regex))
        return None
[ "def", "get_property", "(", "self", ",", "regex", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_property'", ")", "if", "not", "isinstance", "(", "regex", ",", "basestring", ")", ":", "log", ".", "error", "(", "'regex arg is not a string found type: {t}'", ".", "format", "(", "t", "=", "regex", ".", "__class__", ".", "__name__", ")", ")", "return", "None", "log", ".", "debug", "(", "'Looking up property based on regex: {r}'", ".", "format", "(", "r", "=", "regex", ")", ")", "prop_list_matched", "=", "[", "]", "for", "prop_name", "in", "self", ".", "properties", ".", "keys", "(", ")", ":", "match", "=", "re", ".", "search", "(", "regex", ",", "prop_name", ")", "if", "match", ":", "prop_list_matched", ".", "append", "(", "prop_name", ")", "if", "len", "(", "prop_list_matched", ")", "==", "1", ":", "log", ".", "debug", "(", "'Found matching property: {p}'", ".", "format", "(", "p", "=", "prop_list_matched", "[", "0", "]", ")", ")", "return", "prop_list_matched", "[", "0", "]", "elif", "len", "(", "prop_list_matched", ")", ">", "1", ":", "log", ".", "debug", "(", "'Passed regex {r} matched more than 1 property, checking for an exact match...'", ".", "format", "(", "r", "=", "regex", ")", ")", "for", "matched_prop", "in", "prop_list_matched", ":", "if", "matched_prop", "==", "regex", ":", "log", ".", "debug", "(", "'Found an exact match: {p}'", ".", "format", "(", "p", "=", "matched_prop", ")", ")", "return", "matched_prop", "log", ".", "debug", "(", "'Exact match not found for regex {r}, returning None'", ".", "format", "(", "r", "=", "regex", ")", ")", "return", "None", "else", ":", "log", ".", "debug", "(", "'Passed regex did not match any deployment properties: {r}'", ".", "format", "(", "r", "=", "regex", ")", ")", "return", "None" ]
Gets the name of a specific property This public method is passed a regular expression and returns the matching property name. If either the property is not found or if the passed string matches more than one property, this function will return None. :param regex: Regular expression to search on :return: (str) Property name matching the passed regex or None.
[ "Gets", "the", "name", "of", "a", "specific", "property" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L219-L255
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.get_matching_property_names
def get_matching_property_names(self, regex):
    """Returns a list of property names matching the provided regular expression

    :param regex: Regular expression to search on
    :return: (list) of property names matching the regex
    """
    log = logging.getLogger(self.cls_logger + '.get_matching_property_names')
    prop_list_matched = []
    if not isinstance(regex, basestring):
        log.warn('regex arg is not a string, found type: {t}'.format(t=regex.__class__.__name__))
        return prop_list_matched
    log.debug('Finding properties matching regex: {r}'.format(r=regex))
    for prop_name in self.properties.keys():
        match = re.search(regex, prop_name)
        if match:
            prop_list_matched.append(prop_name)
    return prop_list_matched
python
def get_matching_property_names(self, regex):
    """Returns a list of property names matching the provided regular expression

    :param regex: Regular expression to search on
    :return: (list) of property names matching the regex
    """
    log = logging.getLogger(self.cls_logger + '.get_matching_property_names')
    prop_list_matched = []
    if not isinstance(regex, basestring):
        log.warn('regex arg is not a string, found type: {t}'.format(t=regex.__class__.__name__))
        return prop_list_matched
    log.debug('Finding properties matching regex: {r}'.format(r=regex))
    for prop_name in self.properties.keys():
        match = re.search(regex, prop_name)
        if match:
            prop_list_matched.append(prop_name)
    return prop_list_matched
[ "def", "get_matching_property_names", "(", "self", ",", "regex", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_matching_property_names'", ")", "prop_list_matched", "=", "[", "]", "if", "not", "isinstance", "(", "regex", ",", "basestring", ")", ":", "log", ".", "warn", "(", "'regex arg is not a string, found type: {t}'", ".", "format", "(", "t", "=", "regex", ".", "__class__", ".", "__name__", ")", ")", "return", "prop_list_matched", "log", ".", "debug", "(", "'Finding properties matching regex: {r}'", ".", "format", "(", "r", "=", "regex", ")", ")", "for", "prop_name", "in", "self", ".", "properties", ".", "keys", "(", ")", ":", "match", "=", "re", ".", "search", "(", "regex", ",", "prop_name", ")", "if", "match", ":", "prop_list_matched", ".", "append", "(", "prop_name", ")", "return", "prop_list_matched" ]
Returns a list of property names matching the provided regular expression :param regex: Regular expression to search on :return: (list) of property names matching the regex
[ "Returns", "a", "list", "of", "property", "names", "matching", "the", "provided", "regular", "expression" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L257-L274
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.get_value
def get_value(self, property_name):
    """Returns the value associated to the passed property

    This public method is passed a specific property as a string and
    returns the value of that property. If the property is not found,
    None will be returned.

    :param property_name (str) The name of the property
    :return: (str) value for the passed property, or None.
    """
    log = logging.getLogger(self.cls_logger + '.get_value')
    if not isinstance(property_name, basestring):
        log.error('property_name arg is not a string, found type: {t}'.format(t=property_name.__class__.__name__))
        return None

    # Ensure a property with that name exists
    prop = self.get_property(property_name)
    if not prop:
        log.debug('Property name not found matching: {n}'.format(n=property_name))
        return None

    value = self.properties[prop]
    log.debug('Found value for property {n}: {v}'.format(n=property_name, v=value))
    return value
python
def get_value(self, property_name):
    """Returns the value associated to the passed property

    This public method is passed a specific property as a string and
    returns the value of that property. If the property is not found,
    None will be returned.

    :param property_name (str) The name of the property
    :return: (str) value for the passed property, or None.
    """
    log = logging.getLogger(self.cls_logger + '.get_value')
    if not isinstance(property_name, basestring):
        log.error('property_name arg is not a string, found type: {t}'.format(t=property_name.__class__.__name__))
        return None

    # Ensure a property with that name exists
    prop = self.get_property(property_name)
    if not prop:
        log.debug('Property name not found matching: {n}'.format(n=property_name))
        return None

    value = self.properties[prop]
    log.debug('Found value for property {n}: {v}'.format(n=property_name, v=value))
    return value
[ "def", "get_value", "(", "self", ",", "property_name", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_value'", ")", "if", "not", "isinstance", "(", "property_name", ",", "basestring", ")", ":", "log", ".", "error", "(", "'property_name arg is not a string, found type: {t}'", ".", "format", "(", "t", "=", "property_name", ".", "__class__", ".", "__name__", ")", ")", "return", "None", "# Ensure a property with that name exists", "prop", "=", "self", ".", "get_property", "(", "property_name", ")", "if", "not", "prop", ":", "log", ".", "debug", "(", "'Property name not found matching: {n}'", ".", "format", "(", "n", "=", "property_name", ")", ")", "return", "None", "value", "=", "self", ".", "properties", "[", "prop", "]", "log", ".", "debug", "(", "'Found value for property {n}: {v}'", ".", "format", "(", "n", "=", "property_name", ",", "v", "=", "value", ")", ")", "return", "value" ]
Returns the value associated to the passed property This public method is passed a specific property as a string and returns the value of that property. If the property is not found, None will be returned. :param property_name (str) The name of the property :return: (str) value for the passed property, or None.
[ "Returns", "the", "value", "associated", "to", "the", "passed", "property" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L276-L297
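For context, the `main()` sample earlier in this file shows how `get_value` is meant to be called; a minimal usage sketch, assuming a configured deployment environment, could look like this (the property name is one that appears elsewhere in this class):

```python
# Hypothetical lookup of a single deployment property by name.
d = Deployment()
print(d.get_value(property_name='cons3rt.deployment.name'))
```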
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_cons3rt_role_name
def set_cons3rt_role_name(self):
    """Set the cons3rt_role_name member for this system

    :return: None
    :raises: DeploymentError
    """
    log = logging.getLogger(self.cls_logger + '.set_cons3rt_role_name')
    try:
        self.cons3rt_role_name = os.environ['CONS3RT_ROLE_NAME']
    except KeyError:
        log.warn('CONS3RT_ROLE_NAME is not set, attempting to determine it from deployment properties...')
        if platform.system() == 'Linux':
            log.info('Attempting to determine CONS3RT_ROLE_NAME on Linux...')
            try:
                self.determine_cons3rt_role_name_linux()
            except DeploymentError:
                raise
        else:
            log.warn('Unable to determine CONS3RT_ROLE_NAME on this System')
    else:
        log.info('Found environment variable CONS3RT_ROLE_NAME: {r}'.format(r=self.cons3rt_role_name))
        return
python
def set_cons3rt_role_name(self):
    """Set the cons3rt_role_name member for this system

    :return: None
    :raises: DeploymentError
    """
    log = logging.getLogger(self.cls_logger + '.set_cons3rt_role_name')
    try:
        self.cons3rt_role_name = os.environ['CONS3RT_ROLE_NAME']
    except KeyError:
        log.warn('CONS3RT_ROLE_NAME is not set, attempting to determine it from deployment properties...')
        if platform.system() == 'Linux':
            log.info('Attempting to determine CONS3RT_ROLE_NAME on Linux...')
            try:
                self.determine_cons3rt_role_name_linux()
            except DeploymentError:
                raise
        else:
            log.warn('Unable to determine CONS3RT_ROLE_NAME on this System')
    else:
        log.info('Found environment variable CONS3RT_ROLE_NAME: {r}'.format(r=self.cons3rt_role_name))
        return
[ "def", "set_cons3rt_role_name", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_cons3rt_role_name'", ")", "try", ":", "self", ".", "cons3rt_role_name", "=", "os", ".", "environ", "[", "'CONS3RT_ROLE_NAME'", "]", "except", "KeyError", ":", "log", ".", "warn", "(", "'CONS3RT_ROLE_NAME is not set, attempting to determine it from deployment properties...'", ")", "if", "platform", ".", "system", "(", ")", "==", "'Linux'", ":", "log", ".", "info", "(", "'Attempting to determine CONS3RT_ROLE_NAME on Linux...'", ")", "try", ":", "self", ".", "determine_cons3rt_role_name_linux", "(", ")", "except", "DeploymentError", ":", "raise", "else", ":", "log", ".", "warn", "(", "'Unable to determine CONS3RT_ROLE_NAME on this System'", ")", "else", ":", "log", ".", "info", "(", "'Found environment variable CONS3RT_ROLE_NAME: {r}'", ".", "format", "(", "r", "=", "self", ".", "cons3rt_role_name", ")", ")", "return" ]
Set the cons3rt_role_name member for this system :return: None :raises: DeploymentError
[ "Set", "the", "cons3rt_role_name", "member", "for", "this", "system" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L299-L322
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.determine_cons3rt_role_name_linux
def determine_cons3rt_role_name_linux(self): """Determines the CONS3RT_ROLE_NAME for this Linux system, and Set the cons3rt_role_name member for this system This method determines the CONS3RT_ROLE_NAME for this system in the deployment by first checking for the environment variable, if not set, determining the value from the deployment properties. :return: None :raises: DeploymentError """ log = logging.getLogger(self.cls_logger + '.determine_cons3rt_role_name_linux') # Determine IP addresses for this system log.info('Determining the IPv4 addresses for this system...') try: ip_addresses = get_ip_addresses() except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to get the IP address of this system, thus cannot determine the ' \ 'CONS3RT_ROLE_NAME\n{e}'.format(e=str(ex)) log.error(msg) raise DeploymentError, msg, trace else: log.info('Found IP addresses: {a}'.format(a=ip_addresses)) log.info('Trying to determine IP address for eth0...') try: ip_address = ip_addresses['eth0'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to determine the IP address for eth0. Found the ' \ 'following IP addresses: {i}\n{e}'.format(i=ip_addresses, e=str(ex)) log.error(msg) raise DeploymentError, msg, trace else: log.info('Found IP address for eth0: {i}'.format(i=ip_address)) pattern = '^cons3rt\.fap\.deployment\.machine.*0.internalIp=' + ip_address + '$' try: f = open(self.properties_file) except IOError: _, ex, trace = sys.exc_info() msg = 'Could not open file {f}'.format(f=self.properties_file) log.error(msg) raise DeploymentError, msg, trace prop_list_matched = [] log.debug('Searching for deployment properties matching pattern: {p}'.format(p=pattern)) for line in f: log.debug('Processing deployment properties file line: {l}'.format(l=line)) if line.startswith('#'): continue elif '=' in line: match = re.search(pattern, line) if match: log.debug('Found matching prop: {l}'.format(l=line)) prop_list_matched.append(line) log.debug('Number of matching properties found: {n}'.format(n=len(prop_list_matched))) if len(prop_list_matched) == 1: prop_parts = prop_list_matched[0].split('.') if len(prop_parts) > 5: self.cons3rt_role_name = prop_parts[4] log.info('Found CONS3RT_ROLE_NAME from deployment properties: {c}'.format(c=self.cons3rt_role_name)) log.info('Adding CONS3RT_ROLE_NAME to the current environment...') os.environ['CONS3RT_ROLE_NAME'] = self.cons3rt_role_name return else: log.error('Property found was not formatted as expected: %s', prop_parts) else: log.error('Did not find a unique matching deployment property') msg = 'Could not determine CONS3RT_ROLE_NAME from deployment properties' log.error(msg) raise DeploymentError(msg)
python
def determine_cons3rt_role_name_linux(self): """Determines the CONS3RT_ROLE_NAME for this Linux system, and Set the cons3rt_role_name member for this system This method determines the CONS3RT_ROLE_NAME for this system in the deployment by first checking for the environment variable, if not set, determining the value from the deployment properties. :return: None :raises: DeploymentError """ log = logging.getLogger(self.cls_logger + '.determine_cons3rt_role_name_linux') # Determine IP addresses for this system log.info('Determining the IPv4 addresses for this system...') try: ip_addresses = get_ip_addresses() except CommandError: _, ex, trace = sys.exc_info() msg = 'Unable to get the IP address of this system, thus cannot determine the ' \ 'CONS3RT_ROLE_NAME\n{e}'.format(e=str(ex)) log.error(msg) raise DeploymentError, msg, trace else: log.info('Found IP addresses: {a}'.format(a=ip_addresses)) log.info('Trying to determine IP address for eth0...') try: ip_address = ip_addresses['eth0'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to determine the IP address for eth0. Found the ' \ 'following IP addresses: {i}\n{e}'.format(i=ip_addresses, e=str(ex)) log.error(msg) raise DeploymentError, msg, trace else: log.info('Found IP address for eth0: {i}'.format(i=ip_address)) pattern = '^cons3rt\.fap\.deployment\.machine.*0.internalIp=' + ip_address + '$' try: f = open(self.properties_file) except IOError: _, ex, trace = sys.exc_info() msg = 'Could not open file {f}'.format(f=self.properties_file) log.error(msg) raise DeploymentError, msg, trace prop_list_matched = [] log.debug('Searching for deployment properties matching pattern: {p}'.format(p=pattern)) for line in f: log.debug('Processing deployment properties file line: {l}'.format(l=line)) if line.startswith('#'): continue elif '=' in line: match = re.search(pattern, line) if match: log.debug('Found matching prop: {l}'.format(l=line)) prop_list_matched.append(line) log.debug('Number of matching properties found: {n}'.format(n=len(prop_list_matched))) if len(prop_list_matched) == 1: prop_parts = prop_list_matched[0].split('.') if len(prop_parts) > 5: self.cons3rt_role_name = prop_parts[4] log.info('Found CONS3RT_ROLE_NAME from deployment properties: {c}'.format(c=self.cons3rt_role_name)) log.info('Adding CONS3RT_ROLE_NAME to the current environment...') os.environ['CONS3RT_ROLE_NAME'] = self.cons3rt_role_name return else: log.error('Property found was not formatted as expected: %s', prop_parts) else: log.error('Did not find a unique matching deployment property') msg = 'Could not determine CONS3RT_ROLE_NAME from deployment properties' log.error(msg) raise DeploymentError(msg)
[ "def", "determine_cons3rt_role_name_linux", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.determine_cons3rt_role_name_linux'", ")", "# Determine IP addresses for this system", "log", ".", "info", "(", "'Determining the IPv4 addresses for this system...'", ")", "try", ":", "ip_addresses", "=", "get_ip_addresses", "(", ")", "except", "CommandError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to get the IP address of this system, thus cannot determine the '", "'CONS3RT_ROLE_NAME\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Found IP addresses: {a}'", ".", "format", "(", "a", "=", "ip_addresses", ")", ")", "log", ".", "info", "(", "'Trying to determine IP address for eth0...'", ")", "try", ":", "ip_address", "=", "ip_addresses", "[", "'eth0'", "]", "except", "KeyError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to determine the IP address for eth0. Found the '", "'following IP addresses: {i}\\n{e}'", ".", "format", "(", "i", "=", "ip_addresses", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Found IP address for eth0: {i}'", ".", "format", "(", "i", "=", "ip_address", ")", ")", "pattern", "=", "'^cons3rt\\.fap\\.deployment\\.machine.*0.internalIp='", "+", "ip_address", "+", "'$'", "try", ":", "f", "=", "open", "(", "self", ".", "properties_file", ")", "except", "IOError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Could not open file {f}'", ".", "format", "(", "f", "=", "self", ".", "properties_file", ")", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", ",", "msg", ",", "trace", "prop_list_matched", "=", "[", "]", "log", ".", "debug", "(", "'Searching for deployment properties matching pattern: {p}'", ".", "format", "(", "p", "=", "pattern", ")", ")", "for", "line", "in", "f", ":", "log", ".", "debug", "(", "'Processing deployment properties file line: {l}'", ".", "format", "(", "l", "=", "line", ")", ")", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "continue", "elif", "'='", "in", "line", ":", "match", "=", "re", ".", "search", "(", "pattern", ",", "line", ")", "if", "match", ":", "log", ".", "debug", "(", "'Found matching prop: {l}'", ".", "format", "(", "l", "=", "line", ")", ")", "prop_list_matched", ".", "append", "(", "line", ")", "log", ".", "debug", "(", "'Number of matching properties found: {n}'", ".", "format", "(", "n", "=", "len", "(", "prop_list_matched", ")", ")", ")", "if", "len", "(", "prop_list_matched", ")", "==", "1", ":", "prop_parts", "=", "prop_list_matched", "[", "0", "]", ".", "split", "(", "'.'", ")", "if", "len", "(", "prop_parts", ")", ">", "5", ":", "self", ".", "cons3rt_role_name", "=", "prop_parts", "[", "4", "]", "log", ".", "info", "(", "'Found CONS3RT_ROLE_NAME from deployment properties: {c}'", ".", "format", "(", "c", "=", "self", ".", "cons3rt_role_name", ")", ")", "log", ".", "info", "(", "'Adding CONS3RT_ROLE_NAME to the current environment...'", ")", "os", ".", "environ", "[", "'CONS3RT_ROLE_NAME'", "]", "=", "self", ".", "cons3rt_role_name", "return", "else", ":", "log", ".", "error", "(", "'Property found was not formatted as expected: %s'", 
",", "prop_parts", ")", "else", ":", "log", ".", "error", "(", "'Did not find a unique matching deployment property'", ")", "msg", "=", "'Could not determine CONS3RT_ROLE_NAME from deployment properties'", "log", ".", "error", "(", "msg", ")", "raise", "DeploymentError", "(", "msg", ")" ]
Determines the CONS3RT_ROLE_NAME for this Linux system, and Set the cons3rt_role_name member for this system This method determines the CONS3RT_ROLE_NAME for this system in the deployment by first checking for the environment variable, if not set, determining the value from the deployment properties. :return: None :raises: DeploymentError
[ "Determines", "the", "CONS3RT_ROLE_NAME", "for", "this", "Linux", "system", "and", "Set", "the", "cons3rt_role_name", "member", "for", "this", "system" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L324-L399
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_asset_dir
def set_asset_dir(self):
    """Returns the ASSET_DIR environment variable

    This method gets the ASSET_DIR environment variable for the current
    asset install. It returns either the string value if set or None if
    it is not set.

    :return: None
    """
    log = logging.getLogger(self.cls_logger + '.get_asset_dir')
    try:
        self.asset_dir = os.environ['ASSET_DIR']
    except KeyError:
        log.warn('Environment variable ASSET_DIR is not set!')
    else:
        log.info('Found environment variable ASSET_DIR: {a}'.format(a=self.asset_dir))
python
def set_asset_dir(self):
    """Returns the ASSET_DIR environment variable

    This method gets the ASSET_DIR environment variable for the current
    asset install. It returns either the string value if set or None if
    it is not set.

    :return: None
    """
    log = logging.getLogger(self.cls_logger + '.get_asset_dir')
    try:
        self.asset_dir = os.environ['ASSET_DIR']
    except KeyError:
        log.warn('Environment variable ASSET_DIR is not set!')
    else:
        log.info('Found environment variable ASSET_DIR: {a}'.format(a=self.asset_dir))
[ "def", "set_asset_dir", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_asset_dir'", ")", "try", ":", "self", ".", "asset_dir", "=", "os", ".", "environ", "[", "'ASSET_DIR'", "]", "except", "KeyError", ":", "log", ".", "warn", "(", "'Environment variable ASSET_DIR is not set!'", ")", "else", ":", "log", ".", "info", "(", "'Found environment variable ASSET_DIR: {a}'", ".", "format", "(", "a", "=", "self", ".", "asset_dir", ")", ")" ]
Returns the ASSET_DIR environment variable This method gets the ASSET_DIR environment variable for the current asset install. It returns either the string value if set or None if it is not set. :return: None
[ "Returns", "the", "ASSET_DIR", "environment", "variable" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L401-L416
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_scenario_role_names
def set_scenario_role_names(self):
    """Populates the list of scenario role names in this deployment
    and populates the scenario_master with the master role

    Gets a list of deployment properties containing "isMaster" because
    there is exactly one per scenario host, containing the role name

    :return:
    """
    log = logging.getLogger(self.cls_logger + '.set_scenario_role_names')
    is_master_props = self.get_matching_property_names('isMaster')
    for is_master_prop in is_master_props:
        role_name = is_master_prop.split('.')[-1]
        log.info('Adding scenario host: {n}'.format(n=role_name))
        self.scenario_role_names.append(role_name)

        # Determine if this is the scenario master
        is_master = self.get_value(is_master_prop).lower().strip()
        if is_master == 'true':
            log.info('Found master scenario host: {r}'.format(r=role_name))
            self.scenario_master = role_name
python
def set_scenario_role_names(self):
    """Populates the list of scenario role names in this deployment
    and populates the scenario_master with the master role

    Gets a list of deployment properties containing "isMaster" because
    there is exactly one per scenario host, containing the role name

    :return:
    """
    log = logging.getLogger(self.cls_logger + '.set_scenario_role_names')
    is_master_props = self.get_matching_property_names('isMaster')
    for is_master_prop in is_master_props:
        role_name = is_master_prop.split('.')[-1]
        log.info('Adding scenario host: {n}'.format(n=role_name))
        self.scenario_role_names.append(role_name)

        # Determine if this is the scenario master
        is_master = self.get_value(is_master_prop).lower().strip()
        if is_master == 'true':
            log.info('Found master scenario host: {r}'.format(r=role_name))
            self.scenario_master = role_name
[ "def", "set_scenario_role_names", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_scenario_role_names'", ")", "is_master_props", "=", "self", ".", "get_matching_property_names", "(", "'isMaster'", ")", "for", "is_master_prop", "in", "is_master_props", ":", "role_name", "=", "is_master_prop", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "log", ".", "info", "(", "'Adding scenario host: {n}'", ".", "format", "(", "n", "=", "role_name", ")", ")", "self", ".", "scenario_role_names", ".", "append", "(", "role_name", ")", "# Determine if this is the scenario master", "is_master", "=", "self", ".", "get_value", "(", "is_master_prop", ")", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "is_master", "==", "'true'", ":", "log", ".", "info", "(", "'Found master scenario host: {r}'", ".", "format", "(", "r", "=", "role_name", ")", ")", "self", ".", "scenario_master", "=", "role_name" ]
Populates the list of scenario role names in this deployment and populates the scenario_master with the master role Gets a list of deployment properties containing "isMaster" because there is exactly one per scenario host, containing the role name :return:
[ "Populates", "the", "list", "of", "scenario", "role", "names", "in", "this", "deployment", "and", "populates", "the", "scenario_master", "with", "the", "master", "role" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L418-L438
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_scenario_network_info
def set_scenario_network_info(self): """Populates a list of network info for each scenario host from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_scenario_network_info') for scenario_host in self.scenario_role_names: scenario_host_network_info = {'scenario_role_name': scenario_host} log.debug('Looking up network info from deployment properties for scenario host: {s}'.format( s=scenario_host)) network_name_props = self.get_matching_property_names( 'cons3rt.fap.deployment.machine.*{r}.*networkName'.format(r=scenario_host) ) log.debug('Found {n} network name props'.format(n=str(len(network_name_props)))) network_info_list = [] for network_name_prop in network_name_props: network_info = {} network_name = self.get_value(network_name_prop) if not network_name: log.debug('Network name not found for prop: {n}'.format(n=network_name_prop)) continue log.debug('Adding info for network name: {n}'.format(n=network_name)) network_info['network_name'] = network_name interface_name_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.interfaceName'.format( r=scenario_host, n=network_name) interface_name = self.get_value(interface_name_prop) if interface_name: network_info['interface_name'] = interface_name external_ip_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.externalIp'.format( r=scenario_host, n=network_name) external_ip = self.get_value(external_ip_prop) if external_ip: network_info['external_ip'] = external_ip internal_ip_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.internalIp'.format( r=scenario_host, n=network_name) internal_ip = self.get_value(internal_ip_prop) if internal_ip: network_info['internal_ip'] = internal_ip is_cons3rt_connection_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.isCons3rtConnection'.format( r=scenario_host, n=network_name) is_cons3rt_connection = self.get_value(is_cons3rt_connection_prop) if is_cons3rt_connection: if is_cons3rt_connection.lower().strip() == 'true': network_info['is_cons3rt_connection'] = True else: network_info['is_cons3rt_connection'] = False mac_address_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.mac'.format(r=scenario_host, n=network_name) mac_address = self.get_value(mac_address_prop) if mac_address: # Trim the escape characters from the mac address mac_address = mac_address.replace('\\', '') network_info['mac_address'] = mac_address log.debug('Found network info: {n}'.format(n=str(network_info))) network_info_list.append(network_info) scenario_host_network_info['network_info'] = network_info_list self.scenario_network_info.append(scenario_host_network_info)
python
def set_scenario_network_info(self): """Populates a list of network info for each scenario host from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_scenario_network_info') for scenario_host in self.scenario_role_names: scenario_host_network_info = {'scenario_role_name': scenario_host} log.debug('Looking up network info from deployment properties for scenario host: {s}'.format( s=scenario_host)) network_name_props = self.get_matching_property_names( 'cons3rt.fap.deployment.machine.*{r}.*networkName'.format(r=scenario_host) ) log.debug('Found {n} network name props'.format(n=str(len(network_name_props)))) network_info_list = [] for network_name_prop in network_name_props: network_info = {} network_name = self.get_value(network_name_prop) if not network_name: log.debug('Network name not found for prop: {n}'.format(n=network_name_prop)) continue log.debug('Adding info for network name: {n}'.format(n=network_name)) network_info['network_name'] = network_name interface_name_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.interfaceName'.format( r=scenario_host, n=network_name) interface_name = self.get_value(interface_name_prop) if interface_name: network_info['interface_name'] = interface_name external_ip_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.externalIp'.format( r=scenario_host, n=network_name) external_ip = self.get_value(external_ip_prop) if external_ip: network_info['external_ip'] = external_ip internal_ip_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.internalIp'.format( r=scenario_host, n=network_name) internal_ip = self.get_value(internal_ip_prop) if internal_ip: network_info['internal_ip'] = internal_ip is_cons3rt_connection_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.isCons3rtConnection'.format( r=scenario_host, n=network_name) is_cons3rt_connection = self.get_value(is_cons3rt_connection_prop) if is_cons3rt_connection: if is_cons3rt_connection.lower().strip() == 'true': network_info['is_cons3rt_connection'] = True else: network_info['is_cons3rt_connection'] = False mac_address_prop = 'cons3rt.fap.deployment.machine.{r}.{n}.mac'.format(r=scenario_host, n=network_name) mac_address = self.get_value(mac_address_prop) if mac_address: # Trim the escape characters from the mac address mac_address = mac_address.replace('\\', '') network_info['mac_address'] = mac_address log.debug('Found network info: {n}'.format(n=str(network_info))) network_info_list.append(network_info) scenario_host_network_info['network_info'] = network_info_list self.scenario_network_info.append(scenario_host_network_info)
[ "def", "set_scenario_network_info", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_scenario_network_info'", ")", "for", "scenario_host", "in", "self", ".", "scenario_role_names", ":", "scenario_host_network_info", "=", "{", "'scenario_role_name'", ":", "scenario_host", "}", "log", ".", "debug", "(", "'Looking up network info from deployment properties for scenario host: {s}'", ".", "format", "(", "s", "=", "scenario_host", ")", ")", "network_name_props", "=", "self", ".", "get_matching_property_names", "(", "'cons3rt.fap.deployment.machine.*{r}.*networkName'", ".", "format", "(", "r", "=", "scenario_host", ")", ")", "log", ".", "debug", "(", "'Found {n} network name props'", ".", "format", "(", "n", "=", "str", "(", "len", "(", "network_name_props", ")", ")", ")", ")", "network_info_list", "=", "[", "]", "for", "network_name_prop", "in", "network_name_props", ":", "network_info", "=", "{", "}", "network_name", "=", "self", ".", "get_value", "(", "network_name_prop", ")", "if", "not", "network_name", ":", "log", ".", "debug", "(", "'Network name not found for prop: {n}'", ".", "format", "(", "n", "=", "network_name_prop", ")", ")", "continue", "log", ".", "debug", "(", "'Adding info for network name: {n}'", ".", "format", "(", "n", "=", "network_name", ")", ")", "network_info", "[", "'network_name'", "]", "=", "network_name", "interface_name_prop", "=", "'cons3rt.fap.deployment.machine.{r}.{n}.interfaceName'", ".", "format", "(", "r", "=", "scenario_host", ",", "n", "=", "network_name", ")", "interface_name", "=", "self", ".", "get_value", "(", "interface_name_prop", ")", "if", "interface_name", ":", "network_info", "[", "'interface_name'", "]", "=", "interface_name", "external_ip_prop", "=", "'cons3rt.fap.deployment.machine.{r}.{n}.externalIp'", ".", "format", "(", "r", "=", "scenario_host", ",", "n", "=", "network_name", ")", "external_ip", "=", "self", ".", "get_value", "(", "external_ip_prop", ")", "if", "external_ip", ":", "network_info", "[", "'external_ip'", "]", "=", "external_ip", "internal_ip_prop", "=", "'cons3rt.fap.deployment.machine.{r}.{n}.internalIp'", ".", "format", "(", "r", "=", "scenario_host", ",", "n", "=", "network_name", ")", "internal_ip", "=", "self", ".", "get_value", "(", "internal_ip_prop", ")", "if", "internal_ip", ":", "network_info", "[", "'internal_ip'", "]", "=", "internal_ip", "is_cons3rt_connection_prop", "=", "'cons3rt.fap.deployment.machine.{r}.{n}.isCons3rtConnection'", ".", "format", "(", "r", "=", "scenario_host", ",", "n", "=", "network_name", ")", "is_cons3rt_connection", "=", "self", ".", "get_value", "(", "is_cons3rt_connection_prop", ")", "if", "is_cons3rt_connection", ":", "if", "is_cons3rt_connection", ".", "lower", "(", ")", ".", "strip", "(", ")", "==", "'true'", ":", "network_info", "[", "'is_cons3rt_connection'", "]", "=", "True", "else", ":", "network_info", "[", "'is_cons3rt_connection'", "]", "=", "False", "mac_address_prop", "=", "'cons3rt.fap.deployment.machine.{r}.{n}.mac'", ".", "format", "(", "r", "=", "scenario_host", ",", "n", "=", "network_name", ")", "mac_address", "=", "self", ".", "get_value", "(", "mac_address_prop", ")", "if", "mac_address", ":", "# Trim the escape characters from the mac address", "mac_address", "=", "mac_address", ".", "replace", "(", "'\\\\'", ",", "''", ")", "network_info", "[", "'mac_address'", "]", "=", "mac_address", "log", ".", "debug", "(", "'Found network info: {n}'", ".", "format", "(", "n", "=", "str", "(", "network_info", ")", ")", 
")", "network_info_list", ".", "append", "(", "network_info", ")", "scenario_host_network_info", "[", "'network_info'", "]", "=", "network_info_list", "self", ".", "scenario_network_info", ".", "append", "(", "scenario_host_network_info", ")" ]
Populates a list of network info for each scenario host from deployment properties :return: None
[ "Populates", "a", "list", "of", "network", "info", "for", "each", "scenario", "host", "from", "deployment", "properties" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L440-L498
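Judging from the property names parsed in the tokens above, each entry that set_scenario_network_info appends to self.scenario_network_info has roughly the following shape; every value below is a hypothetical placeholder, not data from the deployment:
{
    'scenario_role_name': 'dbserver',
    'network_info': [
        {
            'network_name': 'user-net',
            'interface_name': 'eth0',
            'external_ip': '203.0.113.10',
            'internal_ip': '10.0.0.5',
            'is_cons3rt_connection': True,
            'mac_address': '00:50:56:aa:bb:cc',
        },
    ],
}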
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_deployment_name
def set_deployment_name(self): """Sets the deployment name from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_name') self.deployment_name = self.get_value('cons3rt.deployment.name') log.info('Found deployment name: {n}'.format(n=self.deployment_name))
python
def set_deployment_name(self): """Sets the deployment name from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_name') self.deployment_name = self.get_value('cons3rt.deployment.name') log.info('Found deployment name: {n}'.format(n=self.deployment_name))
[ "def", "set_deployment_name", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_deployment_name'", ")", "self", ".", "deployment_name", "=", "self", ".", "get_value", "(", "'cons3rt.deployment.name'", ")", "log", ".", "info", "(", "'Found deployment name: {n}'", ".", "format", "(", "n", "=", "self", ".", "deployment_name", ")", ")" ]
Sets the deployment name from deployment properties :return: None
[ "Sets", "the", "deployment", "name", "from", "deployment", "properties" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L500-L507
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_deployment_id
def set_deployment_id(self): """Sets the deployment ID from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_id') deployment_id_val = self.get_value('cons3rt.deployment.id') if not deployment_id_val: log.debug('Deployment ID not found in deployment properties') return try: deployment_id = int(deployment_id_val) except ValueError: log.debug('Deployment ID found was unable to convert to an int: {d}'.format(d=deployment_id_val)) return self.deployment_id = deployment_id log.info('Found deployment ID: {i}'.format(i=str(self.deployment_id)))
python
def set_deployment_id(self): """Sets the deployment ID from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_id') deployment_id_val = self.get_value('cons3rt.deployment.id') if not deployment_id_val: log.debug('Deployment ID not found in deployment properties') return try: deployment_id = int(deployment_id_val) except ValueError: log.debug('Deployment ID found was unable to convert to an int: {d}'.format(d=deployment_id_val)) return self.deployment_id = deployment_id log.info('Found deployment ID: {i}'.format(i=str(self.deployment_id)))
[ "def", "set_deployment_id", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_deployment_id'", ")", "deployment_id_val", "=", "self", ".", "get_value", "(", "'cons3rt.deployment.id'", ")", "if", "not", "deployment_id_val", ":", "log", ".", "debug", "(", "'Deployment ID not found in deployment properties'", ")", "return", "try", ":", "deployment_id", "=", "int", "(", "deployment_id_val", ")", "except", "ValueError", ":", "log", ".", "debug", "(", "'Deployment ID found was unable to convert to an int: {d}'", ".", "format", "(", "d", "=", "deployment_id_val", ")", ")", "return", "self", ".", "deployment_id", "=", "deployment_id", "log", ".", "info", "(", "'Found deployment ID: {i}'", ".", "format", "(", "i", "=", "str", "(", "self", ".", "deployment_id", ")", ")", ")" ]
Sets the deployment ID from deployment properties :return: None
[ "Sets", "the", "deployment", "ID", "from", "deployment", "properties" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L509-L525
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_deployment_run_name
def set_deployment_run_name(self): """Sets the deployment run name from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_run_name') self.deployment_run_name = self.get_value('cons3rt.deploymentRun.name') log.info('Found deployment run name: {n}'.format(n=self.deployment_run_name))
python
def set_deployment_run_name(self): """Sets the deployment run name from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_run_name') self.deployment_run_name = self.get_value('cons3rt.deploymentRun.name') log.info('Found deployment run name: {n}'.format(n=self.deployment_run_name))
[ "def", "set_deployment_run_name", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_deployment_run_name'", ")", "self", ".", "deployment_run_name", "=", "self", ".", "get_value", "(", "'cons3rt.deploymentRun.name'", ")", "log", ".", "info", "(", "'Found deployment run name: {n}'", ".", "format", "(", "n", "=", "self", ".", "deployment_run_name", ")", ")" ]
Sets the deployment run name from deployment properties :return: None
[ "Sets", "the", "deployment", "run", "name", "from", "deployment", "properties" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L527-L534
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_deployment_run_id
def set_deployment_run_id(self): """Sets the deployment run ID from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_run_id') deployment_run_id_val = self.get_value('cons3rt.deploymentRun.id') if not deployment_run_id_val: log.debug('Deployment run ID not found in deployment properties') return try: deployment_run_id = int(deployment_run_id_val) except ValueError: log.debug('Deployment run ID found was unable to convert to an int: {d}'.format(d=deployment_run_id_val)) return self.deployment_run_id = deployment_run_id log.info('Found deployment run ID: {i}'.format(i=str(self.deployment_run_id)))
python
def set_deployment_run_id(self): """Sets the deployment run ID from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_deployment_run_id') deployment_run_id_val = self.get_value('cons3rt.deploymentRun.id') if not deployment_run_id_val: log.debug('Deployment run ID not found in deployment properties') return try: deployment_run_id = int(deployment_run_id_val) except ValueError: log.debug('Deployment run ID found was unable to convert to an int: {d}'.format(d=deployment_run_id_val)) return self.deployment_run_id = deployment_run_id log.info('Found deployment run ID: {i}'.format(i=str(self.deployment_run_id)))
[ "def", "set_deployment_run_id", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_deployment_run_id'", ")", "deployment_run_id_val", "=", "self", ".", "get_value", "(", "'cons3rt.deploymentRun.id'", ")", "if", "not", "deployment_run_id_val", ":", "log", ".", "debug", "(", "'Deployment run ID not found in deployment properties'", ")", "return", "try", ":", "deployment_run_id", "=", "int", "(", "deployment_run_id_val", ")", "except", "ValueError", ":", "log", ".", "debug", "(", "'Deployment run ID found was unable to convert to an int: {d}'", ".", "format", "(", "d", "=", "deployment_run_id_val", ")", ")", "return", "self", ".", "deployment_run_id", "=", "deployment_run_id", "log", ".", "info", "(", "'Found deployment run ID: {i}'", ".", "format", "(", "i", "=", "str", "(", "self", ".", "deployment_run_id", ")", ")", ")" ]
Sets the deployment run ID from deployment properties :return: None
[ "Sets", "the", "deployment", "run", "ID", "from", "deployment", "properties" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L536-L552
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_virtualization_realm_type
def set_virtualization_realm_type(self): """Sets the virtualization realm type from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_virtualization_realm_type') self.virtualization_realm_type = self.get_value('cons3rt.deploymentRun.virtRealm.type') log.info('Found virtualization realm type : {t}'.format(t=self.virtualization_realm_type))
python
def set_virtualization_realm_type(self): """Sets the virtualization realm type from deployment properties :return: None """ log = logging.getLogger(self.cls_logger + '.set_virtualization_realm_type') self.virtualization_realm_type = self.get_value('cons3rt.deploymentRun.virtRealm.type') log.info('Found virtualization realm type : {t}'.format(t=self.virtualization_realm_type))
[ "def", "set_virtualization_realm_type", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_virtualization_realm_type'", ")", "self", ".", "virtualization_realm_type", "=", "self", ".", "get_value", "(", "'cons3rt.deploymentRun.virtRealm.type'", ")", "log", ".", "info", "(", "'Found virtualization realm type : {t}'", ".", "format", "(", "t", "=", "self", ".", "virtualization_realm_type", ")", ")" ]
Sets the virtualization realm type from deployment properties :return: None
[ "Sets", "the", "virtualization", "realm", "type", "from", "deployment", "properties" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L554-L561
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.update_hosts_file
def update_hosts_file(self, ip, entry): """Updated the hosts file depending on the OS :param ip: (str) IP address to update :param entry: (str) entry to associate to the IP address :return: None """ log = logging.getLogger(self.cls_logger + '.update_hosts_file') if get_os() in ['Linux', 'Darwin']: update_hosts_file_linux(ip=ip, entry=entry) elif get_os() == 'Windows': update_hosts_file_windows(ip=ip, entry=entry) else: log.warn('OS detected was not Windows nor Linux')
python
def update_hosts_file(self, ip, entry): """Updated the hosts file depending on the OS :param ip: (str) IP address to update :param entry: (str) entry to associate to the IP address :return: None """ log = logging.getLogger(self.cls_logger + '.update_hosts_file') if get_os() in ['Linux', 'Darwin']: update_hosts_file_linux(ip=ip, entry=entry) elif get_os() == 'Windows': update_hosts_file_windows(ip=ip, entry=entry) else: log.warn('OS detected was not Windows nor Linux')
[ "def", "update_hosts_file", "(", "self", ",", "ip", ",", "entry", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.update_hosts_file'", ")", "if", "get_os", "(", ")", "in", "[", "'Linux'", ",", "'Darwin'", "]", ":", "update_hosts_file_linux", "(", "ip", "=", "ip", ",", "entry", "=", "entry", ")", "elif", "get_os", "(", ")", "==", "'Windows'", ":", "update_hosts_file_windows", "(", "ip", "=", "ip", ",", "entry", "=", "entry", ")", "else", ":", "log", ".", "warn", "(", "'OS detected was not Windows nor Linux'", ")" ]
Updated the hosts file depending on the OS :param ip: (str) IP address to update :param entry: (str) entry to associate to the IP address :return: None
[ "Updated", "the", "hosts", "file", "depending", "on", "the", "OS" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L563-L577
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_scenario_hosts_file
def set_scenario_hosts_file(self, network_name='user-net', domain_name=None): """Adds hosts file entries for each system in the scenario for the specified network_name provided :param network_name: (str) Name of the network to add to the hosts file :param domain_name: (str) Domain name to include in the hosts file entries if provided :return: None """ log = logging.getLogger(self.cls_logger + '.set_scenario_hosts_file') log.info('Scanning scenario hosts to make entries in the hosts file for network: {n}'.format(n=network_name)) for scenario_host in self.scenario_network_info: if domain_name: host_file_entry = '{r}.{d} {r}'.format(r=scenario_host['scenario_role_name'], d=domain_name) else: host_file_entry = scenario_host['scenario_role_name'] for host_network_info in scenario_host['network_info']: if host_network_info['network_name'] == network_name: self.update_hosts_file(ip=host_network_info['internal_ip'], entry=host_file_entry)
python
def set_scenario_hosts_file(self, network_name='user-net', domain_name=None): """Adds hosts file entries for each system in the scenario for the specified network_name provided :param network_name: (str) Name of the network to add to the hosts file :param domain_name: (str) Domain name to include in the hosts file entries if provided :return: None """ log = logging.getLogger(self.cls_logger + '.set_scenario_hosts_file') log.info('Scanning scenario hosts to make entries in the hosts file for network: {n}'.format(n=network_name)) for scenario_host in self.scenario_network_info: if domain_name: host_file_entry = '{r}.{d} {r}'.format(r=scenario_host['scenario_role_name'], d=domain_name) else: host_file_entry = scenario_host['scenario_role_name'] for host_network_info in scenario_host['network_info']: if host_network_info['network_name'] == network_name: self.update_hosts_file(ip=host_network_info['internal_ip'], entry=host_file_entry)
[ "def", "set_scenario_hosts_file", "(", "self", ",", "network_name", "=", "'user-net'", ",", "domain_name", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_scenario_hosts_file'", ")", "log", ".", "info", "(", "'Scanning scenario hosts to make entries in the hosts file for network: {n}'", ".", "format", "(", "n", "=", "network_name", ")", ")", "for", "scenario_host", "in", "self", ".", "scenario_network_info", ":", "if", "domain_name", ":", "host_file_entry", "=", "'{r}.{d} {r}'", ".", "format", "(", "r", "=", "scenario_host", "[", "'scenario_role_name'", "]", ",", "d", "=", "domain_name", ")", "else", ":", "host_file_entry", "=", "scenario_host", "[", "'scenario_role_name'", "]", "for", "host_network_info", "in", "scenario_host", "[", "'network_info'", "]", ":", "if", "host_network_info", "[", "'network_name'", "]", "==", "network_name", ":", "self", ".", "update_hosts_file", "(", "ip", "=", "host_network_info", "[", "'internal_ip'", "]", ",", "entry", "=", "host_file_entry", ")" ]
Adds hosts file entries for each system in the scenario for the specified network_name provided :param network_name: (str) Name of the network to add to the hosts file :param domain_name: (str) Domain name to include in the hosts file entries if provided :return: None
[ "Adds", "hosts", "file", "entries", "for", "each", "system", "in", "the", "scenario", "for", "the", "specified", "network_name", "provided" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L579-L597
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.set_hosts_file_entry_for_role
def set_hosts_file_entry_for_role(self, role_name, network_name='user-net', fqdn=None, domain_name=None): """Adds an entry to the hosts file for a scenario host given the role name and network name :param role_name: (str) role name of the host to add :param network_name: (str) Name of the network to add to the hosts file :param fqdn: (str) Fully qualified domain name to use in the hosts file entry (trumps domain name) :param domain_name: (str) Domain name to include in the hosts file entries if provided :return: """ log = logging.getLogger(self.cls_logger + '.set_hosts_file_entry_for_role') # Determine the host file entry portion if fqdn: host_file_entry = fqdn else: if domain_name: host_file_entry = '{r}.{d} {r}'.format(r=role_name, d=domain_name) else: host_file_entry = role_name log.info('Using hosts file entry: {e}'.format(e=host_file_entry)) log.info('Scanning scenario hosts for role name [{r}] and network: {n}'.format(r=role_name, n=network_name)) for scenario_host in self.scenario_network_info: if scenario_host['scenario_role_name'] == role_name: for host_network_info in scenario_host['network_info']: if host_network_info['network_name'] == network_name: self.update_hosts_file(ip=host_network_info['internal_ip'], entry=host_file_entry)
python
def set_hosts_file_entry_for_role(self, role_name, network_name='user-net', fqdn=None, domain_name=None): """Adds an entry to the hosts file for a scenario host given the role name and network name :param role_name: (str) role name of the host to add :param network_name: (str) Name of the network to add to the hosts file :param fqdn: (str) Fully qualified domain name to use in the hosts file entry (trumps domain name) :param domain_name: (str) Domain name to include in the hosts file entries if provided :return: """ log = logging.getLogger(self.cls_logger + '.set_hosts_file_entry_for_role') # Determine the host file entry portion if fqdn: host_file_entry = fqdn else: if domain_name: host_file_entry = '{r}.{d} {r}'.format(r=role_name, d=domain_name) else: host_file_entry = role_name log.info('Using hosts file entry: {e}'.format(e=host_file_entry)) log.info('Scanning scenario hosts for role name [{r}] and network: {n}'.format(r=role_name, n=network_name)) for scenario_host in self.scenario_network_info: if scenario_host['scenario_role_name'] == role_name: for host_network_info in scenario_host['network_info']: if host_network_info['network_name'] == network_name: self.update_hosts_file(ip=host_network_info['internal_ip'], entry=host_file_entry)
[ "def", "set_hosts_file_entry_for_role", "(", "self", ",", "role_name", ",", "network_name", "=", "'user-net'", ",", "fqdn", "=", "None", ",", "domain_name", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.set_hosts_file_entry_for_role'", ")", "# Determine the host file entry portion", "if", "fqdn", ":", "host_file_entry", "=", "fqdn", "else", ":", "if", "domain_name", ":", "host_file_entry", "=", "'{r}.{d} {r}'", ".", "format", "(", "r", "=", "role_name", ",", "d", "=", "domain_name", ")", "else", ":", "host_file_entry", "=", "role_name", "log", ".", "info", "(", "'Using hosts file entry: {e}'", ".", "format", "(", "e", "=", "host_file_entry", ")", ")", "log", ".", "info", "(", "'Scanning scenario hosts for role name [{r}] and network: {n}'", ".", "format", "(", "r", "=", "role_name", ",", "n", "=", "network_name", ")", ")", "for", "scenario_host", "in", "self", ".", "scenario_network_info", ":", "if", "scenario_host", "[", "'scenario_role_name'", "]", "==", "role_name", ":", "for", "host_network_info", "in", "scenario_host", "[", "'network_info'", "]", ":", "if", "host_network_info", "[", "'network_name'", "]", "==", "network_name", ":", "self", ".", "update_hosts_file", "(", "ip", "=", "host_network_info", "[", "'internal_ip'", "]", ",", "entry", "=", "host_file_entry", ")" ]
Adds an entry to the hosts file for a scenario host given the role name and network name :param role_name: (str) role name of the host to add :param network_name: (str) Name of the network to add to the hosts file :param fqdn: (str) Fully qualified domain name to use in the hosts file entry (trumps domain name) :param domain_name: (str) Domain name to include in the hosts file entries if provided :return:
[ "Adds", "an", "entry", "to", "the", "hosts", "file", "for", "a", "scenario", "host", "given", "the", "role", "name", "and", "network", "name" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L599-L625
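A minimal usage sketch for the two hosts-file helpers above, assuming the no-argument Deployment() constructor used elsewhere in this module; the role name, network name, and domain below are placeholders:
from pycons3rt.deployment import Deployment

dep = Deployment()  # assumption: default constructor, as used by this module's CLI
# write every scenario host's user-net IP into the hosts file
dep.set_scenario_hosts_file(network_name='user-net', domain_name='example.local')
# or add a single entry for one role only
dep.set_hosts_file_entry_for_role(role_name='dbserver', network_name='user-net', domain_name='example.local')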
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.get_ip_on_network
def get_ip_on_network(self, network_name): """Given a network name, returns the IP address :param network_name: (str) Name of the network to search for :return: (str) IP address on the specified network or None """ return self.get_scenario_host_ip_on_network( scenario_role_name=self.cons3rt_role_name, network_name=network_name )
python
def get_ip_on_network(self, network_name): """Given a network name, returns the IP address :param network_name: (str) Name of the network to search for :return: (str) IP address on the specified network or None """ return self.get_scenario_host_ip_on_network( scenario_role_name=self.cons3rt_role_name, network_name=network_name )
[ "def", "get_ip_on_network", "(", "self", ",", "network_name", ")", ":", "return", "self", ".", "get_scenario_host_ip_on_network", "(", "scenario_role_name", "=", "self", ".", "cons3rt_role_name", ",", "network_name", "=", "network_name", ")" ]
Given a network name, returns the IP address :param network_name: (str) Name of the network to search for :return: (str) IP address on the specified network or None
[ "Given", "a", "network", "name", "returns", "the", "IP", "address" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L627-L636
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.get_scenario_host_ip_on_network
def get_scenario_host_ip_on_network(self, scenario_role_name, network_name): """Given a network name, returns the IP address :param network_name: (str) Name of the network to search for :param scenario_role_name: (str) role name to return the IP address for :return: (str) IP address on the specified network or None """ log = logging.getLogger(self.cls_logger + '.get_scenario_host_ip_on_network') # Determine the network info for this host based on role name cons3rt_network_info = None for scenario_host in self.scenario_network_info: if scenario_host['scenario_role_name'] == scenario_role_name: cons3rt_network_info = scenario_host['network_info'] if not cons3rt_network_info: log.warn('Unable to find network info for this host') return # Attempt to find a matching IP for network name internal_ip = None for cons3rt_network in cons3rt_network_info: if cons3rt_network['network_name'] == network_name: internal_ip = cons3rt_network['internal_ip'] if not internal_ip: log.warn('Unable to find an internal IP for network: {n}'.format(n=network_name)) return log.debug('Found IP address [{i}] for network name: {n}'.format(i=internal_ip, n=network_name)) return internal_ip
python
def get_scenario_host_ip_on_network(self, scenario_role_name, network_name): """Given a network name, returns the IP address :param network_name: (str) Name of the network to search for :param scenario_role_name: (str) role name to return the IP address for :return: (str) IP address on the specified network or None """ log = logging.getLogger(self.cls_logger + '.get_scenario_host_ip_on_network') # Determine the network info for this host based on role name cons3rt_network_info = None for scenario_host in self.scenario_network_info: if scenario_host['scenario_role_name'] == scenario_role_name: cons3rt_network_info = scenario_host['network_info'] if not cons3rt_network_info: log.warn('Unable to find network info for this host') return # Attempt to find a matching IP for network name internal_ip = None for cons3rt_network in cons3rt_network_info: if cons3rt_network['network_name'] == network_name: internal_ip = cons3rt_network['internal_ip'] if not internal_ip: log.warn('Unable to find an internal IP for network: {n}'.format(n=network_name)) return log.debug('Found IP address [{i}] for network name: {n}'.format(i=internal_ip, n=network_name)) return internal_ip
[ "def", "get_scenario_host_ip_on_network", "(", "self", ",", "scenario_role_name", ",", "network_name", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_scenario_host_ip_on_network'", ")", "# Determine the network info for this host based on role name", "cons3rt_network_info", "=", "None", "for", "scenario_host", "in", "self", ".", "scenario_network_info", ":", "if", "scenario_host", "[", "'scenario_role_name'", "]", "==", "scenario_role_name", ":", "cons3rt_network_info", "=", "scenario_host", "[", "'network_info'", "]", "if", "not", "cons3rt_network_info", ":", "log", ".", "warn", "(", "'Unable to find network info for this host'", ")", "return", "# Attempt to find a matching IP for network name", "internal_ip", "=", "None", "for", "cons3rt_network", "in", "cons3rt_network_info", ":", "if", "cons3rt_network", "[", "'network_name'", "]", "==", "network_name", ":", "internal_ip", "=", "cons3rt_network", "[", "'internal_ip'", "]", "if", "not", "internal_ip", ":", "log", ".", "warn", "(", "'Unable to find an internal IP for network: {n}'", ".", "format", "(", "n", "=", "network_name", ")", ")", "return", "log", ".", "debug", "(", "'Found IP address [{i}] for network name: {n}'", ".", "format", "(", "i", "=", "internal_ip", ",", "n", "=", "network_name", ")", ")", "return", "internal_ip" ]
Given a network name, returns the IP address :param network_name: (str) Name of the network to search for :param scenario_role_name: (str) role name to return the IP address for :return: (str) IP address on the specified network or None
[ "Given", "a", "network", "name", "returns", "the", "IP", "address" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L638-L665
cons3rt/pycons3rt
pycons3rt/deployment.py
Deployment.get_device_for_network_linux
def get_device_for_network_linux(self, network_name): """Given a cons3rt network name, return the network interface name on this Linux system :param network_name: (str) Name of the network to search for :return: (str) name of the network interface device or None """ log = logging.getLogger(self.cls_logger + '.get_device_for_network_linux') if get_os() not in ['Linux']: log.warn('Non-linux OS detected, returning...') return # Get the IP address for the network name according to cons3rt ip_address = self.get_ip_on_network(network_name=network_name) if not ip_address: log.warn('IP address not found for network with name: {n}'.format(n=network_name)) return # Get the system device names and ip addresses sys_info = ip_addr() # Check for a matching IP address device_name = None for device_name, sys_ip_address in sys_info.iteritems(): if sys_ip_address == ip_address: log.debug('Found matching system IP [{i}] for device: {d}'.format(i=ip_address, d=device_name)) if not device_name: log.warn('Network device not found with IP address {i} in system network data: {d}'.format( i=ip_address, d=str(sys_info))) return log.debug('Found device name [{d}] with IP address [{i}] for network: {n}'.format( d=device_name, i=ip_address, n=network_name)) return device_name
python
def get_device_for_network_linux(self, network_name): """Given a cons3rt network name, return the network interface name on this Linux system :param network_name: (str) Name of the network to search for :return: (str) name of the network interface device or None """ log = logging.getLogger(self.cls_logger + '.get_device_for_network_linux') if get_os() not in ['Linux']: log.warn('Non-linux OS detected, returning...') return # Get the IP address for the network name according to cons3rt ip_address = self.get_ip_on_network(network_name=network_name) if not ip_address: log.warn('IP address not found for network with name: {n}'.format(n=network_name)) return # Get the system device names and ip addresses sys_info = ip_addr() # Check for a matching IP address device_name = None for device_name, sys_ip_address in sys_info.iteritems(): if sys_ip_address == ip_address: log.debug('Found matching system IP [{i}] for device: {d}'.format(i=ip_address, d=device_name)) if not device_name: log.warn('Network device not found with IP address {i} in system network data: {d}'.format( i=ip_address, d=str(sys_info))) return log.debug('Found device name [{d}] with IP address [{i}] for network: {n}'.format( d=device_name, i=ip_address, n=network_name)) return device_name
[ "def", "get_device_for_network_linux", "(", "self", ",", "network_name", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_device_for_network_linux'", ")", "if", "get_os", "(", ")", "not", "in", "[", "'Linux'", "]", ":", "log", ".", "warn", "(", "'Non-linux OS detected, returning...'", ")", "return", "# Get the IP address for the network name according to cons3rt", "ip_address", "=", "self", ".", "get_ip_on_network", "(", "network_name", "=", "network_name", ")", "if", "not", "ip_address", ":", "log", ".", "warn", "(", "'IP address not found for network with name: {n}'", ".", "format", "(", "n", "=", "network_name", ")", ")", "return", "# Get the system device names and ip addresses", "sys_info", "=", "ip_addr", "(", ")", "# Check for a matching IP address", "device_name", "=", "None", "for", "device_name", ",", "sys_ip_address", "in", "sys_info", ".", "iteritems", "(", ")", ":", "if", "sys_ip_address", "==", "ip_address", ":", "log", ".", "debug", "(", "'Found matching system IP [{i}] for device: {d}'", ".", "format", "(", "i", "=", "ip_address", ",", "d", "=", "device_name", ")", ")", "if", "not", "device_name", ":", "log", ".", "warn", "(", "'Network device not found with IP address {i} in system network data: {d}'", ".", "format", "(", "i", "=", "ip_address", ",", "d", "=", "str", "(", "sys_info", ")", ")", ")", "return", "log", ".", "debug", "(", "'Found device name [{d}] with IP address [{i}] for network: {n}'", ".", "format", "(", "d", "=", "device_name", ",", "i", "=", "ip_address", ",", "n", "=", "network_name", ")", ")", "return", "device_name" ]
Given a cons3rt network name, return the network interface name on this Linux system :param network_name: (str) Name of the network to search for :return: (str) name of the network interface device or None
[ "Given", "a", "cons3rt", "network", "name", "return", "the", "network", "interface", "name", "on", "this", "Linux", "system" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/deployment.py#L667-L701
kajala/django-jutil
jutil/auth.py
require_auth
def require_auth(request: Request, exceptions: bool=True) -> User: """ Returns authenticated User. :param request: HttpRequest :param exceptions: Raise (NotAuthenticated) exception. Default is True. :return: User """ if not request.user or not request.user.is_authenticated: if exceptions: raise NotAuthenticated() return None return request.user
python
def require_auth(request: Request, exceptions: bool=True) -> User: """ Returns authenticated User. :param request: HttpRequest :param exceptions: Raise (NotAuthenticated) exception. Default is True. :return: User """ if not request.user or not request.user.is_authenticated: if exceptions: raise NotAuthenticated() return None return request.user
[ "def", "require_auth", "(", "request", ":", "Request", ",", "exceptions", ":", "bool", "=", "True", ")", "->", "User", ":", "if", "not", "request", ".", "user", "or", "not", "request", ".", "user", ".", "is_authenticated", ":", "if", "exceptions", ":", "raise", "NotAuthenticated", "(", ")", "return", "None", "return", "request", ".", "user" ]
Returns authenticated User. :param request: HttpRequest :param exceptions: Raise (NotAuthenticated) exception. Default is True. :return: User
[ "Returns", "authenticated", "User", ".", ":", "param", "request", ":", "HttpRequest", ":", "param", "exceptions", ":", "Raise", "(", "NotAuthenticated", ")", "exception", ".", "Default", "is", "True", ".", ":", "return", ":", "User" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/auth.py#L7-L18
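A short usage sketch for require_auth inside a Django REST Framework view; the view class and response fields are illustrative only:
from rest_framework.views import APIView
from rest_framework.response import Response
from jutil.auth import require_auth

class WhoAmIView(APIView):
    def get(self, request):
        user = require_auth(request)  # raises NotAuthenticated for anonymous requests
        return Response({'username': user.username})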
mozilla/libnfldap
libnfldap.py
IPTables.insertSaneDefaults
def insertSaneDefaults(self): """ Add sane defaults rules to the raw and filter tables """ self.raw.insert(0, '-A OUTPUT -o lo -j NOTRACK') self.raw.insert(1, '-A PREROUTING -i lo -j NOTRACK') self.filters.insert(0, '-A INPUT -i lo -j ACCEPT') self.filters.insert(1, '-A OUTPUT -o lo -j ACCEPT') self.filters.insert(2, '-A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT') self.filters.insert(3, '-A OUTPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT') return self
python
def insertSaneDefaults(self): """ Add sane defaults rules to the raw and filter tables """ self.raw.insert(0, '-A OUTPUT -o lo -j NOTRACK') self.raw.insert(1, '-A PREROUTING -i lo -j NOTRACK') self.filters.insert(0, '-A INPUT -i lo -j ACCEPT') self.filters.insert(1, '-A OUTPUT -o lo -j ACCEPT') self.filters.insert(2, '-A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT') self.filters.insert(3, '-A OUTPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT') return self
[ "def", "insertSaneDefaults", "(", "self", ")", ":", "self", ".", "raw", ".", "insert", "(", "0", ",", "'-A OUTPUT -o lo -j NOTRACK'", ")", "self", ".", "raw", ".", "insert", "(", "1", ",", "'-A PREROUTING -i lo -j NOTRACK'", ")", "self", ".", "filters", ".", "insert", "(", "0", ",", "'-A INPUT -i lo -j ACCEPT'", ")", "self", ".", "filters", ".", "insert", "(", "1", ",", "'-A OUTPUT -o lo -j ACCEPT'", ")", "self", ".", "filters", ".", "insert", "(", "2", ",", "'-A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT'", ")", "self", ".", "filters", ".", "insert", "(", "3", ",", "'-A OUTPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT'", ")", "return", "self" ]
Add sane defaults rules to the raw and filter tables
[ "Add", "sane", "defaults", "rules", "to", "the", "raw", "and", "filter", "tables" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L92-L100
mozilla/libnfldap
libnfldap.py
IPTables.appendDefaultDrop
def appendDefaultDrop(self): """ Add a DROP policy at the end of the rules """ self.filters.append('-A INPUT -j DROP') self.filters.append('-A OUTPUT -j DROP') self.filters.append('-A FORWARD -j DROP') return self
python
def appendDefaultDrop(self): """ Add a DROP policy at the end of the rules """ self.filters.append('-A INPUT -j DROP') self.filters.append('-A OUTPUT -j DROP') self.filters.append('-A FORWARD -j DROP') return self
[ "def", "appendDefaultDrop", "(", "self", ")", ":", "self", ".", "filters", ".", "append", "(", "'-A INPUT -j DROP'", ")", "self", ".", "filters", ".", "append", "(", "'-A OUTPUT -j DROP'", ")", "self", ".", "filters", ".", "append", "(", "'-A FORWARD -j DROP'", ")", "return", "self" ]
Add a DROP policy at the end of the rules
[ "Add", "a", "DROP", "policy", "at", "the", "end", "of", "the", "rules" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L102-L107
mozilla/libnfldap
libnfldap.py
IPTables.template
def template(self): """ Create a rules file in iptables-restore format """ s = Template(self._IPTABLES_TEMPLATE) return s.substitute(filtertable='\n'.join(self.filters), rawtable='\n'.join(self.raw), mangletable='\n'.join(self.mangle), nattable='\n'.join(self.nat), date=datetime.today())
python
def template(self): """ Create a rules file in iptables-restore format """ s = Template(self._IPTABLES_TEMPLATE) return s.substitute(filtertable='\n'.join(self.filters), rawtable='\n'.join(self.raw), mangletable='\n'.join(self.mangle), nattable='\n'.join(self.nat), date=datetime.today())
[ "def", "template", "(", "self", ")", ":", "s", "=", "Template", "(", "self", ".", "_IPTABLES_TEMPLATE", ")", "return", "s", ".", "substitute", "(", "filtertable", "=", "'\\n'", ".", "join", "(", "self", ".", "filters", ")", ",", "rawtable", "=", "'\\n'", ".", "join", "(", "self", ".", "raw", ")", ",", "mangletable", "=", "'\\n'", ".", "join", "(", "self", ".", "mangle", ")", ",", "nattable", "=", "'\\n'", ".", "join", "(", "self", ".", "nat", ")", ",", "date", "=", "datetime", ".", "today", "(", ")", ")" ]
Create a rules file in iptables-restore format
[ "Create", "a", "rules", "file", "in", "iptables", "-", "restore", "format" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L155-L162
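A hedged sketch of how the three IPTables methods above combine, assuming the class can be constructed without arguments and that filters is a plain list of rule strings (both inferred from the code, not confirmed by it):
from libnfldap import IPTables

ipt = IPTables()                 # assumption: no-argument constructor
ipt.insertSaneDefaults()         # loopback NOTRACK/ACCEPT plus conntrack rules
ipt.filters.append('-A INPUT -p tcp --dport 22 -j ACCEPT')  # illustrative extra rule
ipt.appendDefaultDrop()          # trailing DROP policies
rules = ipt.template()           # iptables-restore formatted text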
mozilla/libnfldap
libnfldap.py
IPset.template
def template(self): """ Create a rules file in ipset --restore format """ s = Template(self._IPSET_TEMPLATE) return s.substitute(sets='\n'.join(self.sets), date=datetime.today())
python
def template(self): """ Create a rules file in ipset --restore format """ s = Template(self._IPSET_TEMPLATE) return s.substitute(sets='\n'.join(self.sets), date=datetime.today())
[ "def", "template", "(", "self", ")", ":", "s", "=", "Template", "(", "self", ".", "_IPSET_TEMPLATE", ")", "return", "s", ".", "substitute", "(", "sets", "=", "'\\n'", ".", "join", "(", "self", ".", "sets", ")", ",", "date", "=", "datetime", ".", "today", "(", ")", ")" ]
Create a rules file in ipset --restore format
[ "Create", "a", "rules", "file", "in", "ipset", "--", "restore", "format" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L210-L214
mozilla/libnfldap
libnfldap.py
LDAP.query
def query(self, base, filterstr, attrlist=None): """ wrapper to search_s """ return self.conn.search_s(base, ldap.SCOPE_SUBTREE, filterstr, attrlist)
python
def query(self, base, filterstr, attrlist=None): """ wrapper to search_s """ return self.conn.search_s(base, ldap.SCOPE_SUBTREE, filterstr, attrlist)
[ "def", "query", "(", "self", ",", "base", ",", "filterstr", ",", "attrlist", "=", "None", ")", ":", "return", "self", ".", "conn", ".", "search_s", "(", "base", ",", "ldap", ".", "SCOPE_SUBTREE", ",", "filterstr", ",", "attrlist", ")" ]
wrapper to search_s
[ "wrapper", "to", "search_s" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L228-L230
mozilla/libnfldap
libnfldap.py
LDAP.getUserByNumber
def getUserByNumber(self, base, uidNumber): """ search for a user in LDAP and return its DN and uid """ res = self.query(base, "uidNumber="+str(uidNumber), ['uid']) if len(res) > 1: raise InputError(uidNumber, "Multiple users found. Expecting one.") return res[0][0], res[0][1]['uid'][0]
python
def getUserByNumber(self, base, uidNumber): """ search for a user in LDAP and return its DN and uid """ res = self.query(base, "uidNumber="+str(uidNumber), ['uid']) if len(res) > 1: raise InputError(uidNumber, "Multiple users found. Expecting one.") return res[0][0], res[0][1]['uid'][0]
[ "def", "getUserByNumber", "(", "self", ",", "base", ",", "uidNumber", ")", ":", "res", "=", "self", ".", "query", "(", "base", ",", "\"uidNumber=\"", "+", "str", "(", "uidNumber", ")", ",", "[", "'uid'", "]", ")", "if", "len", "(", "res", ")", ">", "1", ":", "raise", "InputError", "(", "uidNumber", ",", "\"Multiple users found. Expecting one.\"", ")", "return", "res", "[", "0", "]", "[", "0", "]", ",", "res", "[", "0", "]", "[", "1", "]", "[", "'uid'", "]", "[", "0", "]" ]
search for a user in LDAP and return its DN and uid
[ "search", "for", "a", "user", "in", "LDAP", "and", "return", "its", "DN", "and", "uid" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L232-L237
mozilla/libnfldap
libnfldap.py
LDAP.getACLs
def getACLs(self, base, searchstr): """ Query LDAP to obtain the network ACLs of a given user, parse the ACLs, and return the results in a dict of the form acls[group][cidr] = description """ acls = dict() res = self.query(base, searchstr, ['cn', 'ipHostNumber']) for dn,attr in res: cn = attr['cn'][0] dests = dict() if attr.has_key('ipHostNumber'): for entry in attr['ipHostNumber']: dest = entry.split('#', 1)[0].replace(" ", "") if len(entry.split('#', 1)) == 2: desc = entry.split('#', 1)[1].strip() else: desc = "" if not is_ip(dest): print dest, desc raise InputError(dest, "Invalid IP format") dests[dest] = desc acls[cn] = dests return acls
python
def getACLs(self, base, searchstr): """ Query LDAP to obtain the network ACLs of a given user, parse the ACLs, and return the results in a dict of the form acls[group][cidr] = description """ acls = dict() res = self.query(base, searchstr, ['cn', 'ipHostNumber']) for dn,attr in res: cn = attr['cn'][0] dests = dict() if attr.has_key('ipHostNumber'): for entry in attr['ipHostNumber']: dest = entry.split('#', 1)[0].replace(" ", "") if len(entry.split('#', 1)) == 2: desc = entry.split('#', 1)[1].strip() else: desc = "" if not is_ip(dest): print dest, desc raise InputError(dest, "Invalid IP format") dests[dest] = desc acls[cn] = dests return acls
[ "def", "getACLs", "(", "self", ",", "base", ",", "searchstr", ")", ":", "acls", "=", "dict", "(", ")", "res", "=", "self", ".", "query", "(", "base", ",", "searchstr", ",", "[", "'cn'", ",", "'ipHostNumber'", "]", ")", "for", "dn", ",", "attr", "in", "res", ":", "cn", "=", "attr", "[", "'cn'", "]", "[", "0", "]", "dests", "=", "dict", "(", ")", "if", "attr", ".", "has_key", "(", "'ipHostNumber'", ")", ":", "for", "entry", "in", "attr", "[", "'ipHostNumber'", "]", ":", "dest", "=", "entry", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", ".", "replace", "(", "\" \"", ",", "\"\"", ")", "if", "len", "(", "entry", ".", "split", "(", "'#'", ",", "1", ")", ")", "==", "2", ":", "desc", "=", "entry", ".", "split", "(", "'#'", ",", "1", ")", "[", "1", "]", ".", "strip", "(", ")", "else", ":", "desc", "=", "\"\"", "if", "not", "is_ip", "(", "dest", ")", ":", "print", "dest", ",", "desc", "raise", "InputError", "(", "dest", ",", "\"Invalid IP format\"", ")", "dests", "[", "dest", "]", "=", "desc", "acls", "[", "cn", "]", "=", "dests", "return", "acls" ]
Query LDAP to obtain the network ACLs of a given user, parse the ACLs, and return the results in a dict of the form acls[group][cidr] = description
[ "Query", "LDAP", "to", "obtain", "the", "network", "ACLs", "of", "a", "given", "user", "parse", "the", "ACLs", "and", "return", "the", "results", "in", "a", "dict", "of", "the", "form", "acls", "[", "group", "]", "[", "cidr", "]", "=", "description" ]
train
https://github.com/mozilla/libnfldap/blob/27407a6ed6bfd07d1eba2824398c77e2fe98f968/libnfldap.py#L239-L262
InformaticsMatters/pipelines-utils
src/python/pipelines_utils/parameter_utils.py
splitValues
def splitValues(textStr): """Splits a comma-separated number sequence into a list (of floats). """ vals = textStr.split(",") nums = [] for v in vals: nums.append(float(v)) return nums
python
def splitValues(textStr): """Splits a comma-separated number sequence into a list (of floats). """ vals = textStr.split(",") nums = [] for v in vals: nums.append(float(v)) return nums
[ "def", "splitValues", "(", "textStr", ")", ":", "vals", "=", "textStr", ".", "split", "(", "\",\"", ")", "nums", "=", "[", "]", "for", "v", "in", "vals", ":", "nums", ".", "append", "(", "float", "(", "v", ")", ")", "return", "nums" ]
Splits a comma-separated number sequence into a list (of floats).
[ "Splits", "a", "comma", "-", "separated", "number", "sequence", "into", "a", "list", "(", "of", "floats", ")", "." ]
train
https://github.com/InformaticsMatters/pipelines-utils/blob/058aa6eceeff28c4ae402f6f58c58720bff0298e/src/python/pipelines_utils/parameter_utils.py#L49-L56
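For illustration, splitValues simply maps a comma-separated string to a list of floats:
from pipelines_utils.parameter_utils import splitValues

splitValues("1,2.5,10")   # -> [1.0, 2.5, 10.0]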
InformaticsMatters/pipelines-utils
src/python/pipelines_utils/parameter_utils.py
expandParameters
def expandParameters(*args): """Expands parameters (presented as tuples of lists and symbolic names) so that each is returned in a new list where each contains the same number of values. Each `arg` is a tuple containing two items: a list of values and a symbolic name. """ count = 1 for arg in args: count = max(len(arg[0]), count) results = [] for arg in args: results.append(expandValues(arg[0], count, args[1])) return tuple(results)
python
def expandParameters(*args): """Expands parameters (presented as tuples of lists and symbolic names) so that each is returned in a new list where each contains the same number of values. Each `arg` is a tuple containing two items: a list of values and a symbolic name. """ count = 1 for arg in args: count = max(len(arg[0]), count) results = [] for arg in args: results.append(expandValues(arg[0], count, args[1])) return tuple(results)
[ "def", "expandParameters", "(", "*", "args", ")", ":", "count", "=", "1", "for", "arg", "in", "args", ":", "count", "=", "max", "(", "len", "(", "arg", "[", "0", "]", ")", ",", "count", ")", "results", "=", "[", "]", "for", "arg", "in", "args", ":", "results", ".", "append", "(", "expandValues", "(", "arg", "[", "0", "]", ",", "count", ",", "args", "[", "1", "]", ")", ")", "return", "tuple", "(", "results", ")" ]
Expands parameters (presented as tuples of lists and symbolic names) so that each is returned in a new list where each contains the same number of values. Each `arg` is a tuple containing two items: a list of values and a symbolic name.
[ "Expands", "parameters", "(", "presented", "as", "tuples", "of", "lists", "and", "symbolic", "names", ")", "so", "that", "each", "is", "returned", "in", "a", "new", "list", "where", "each", "contains", "the", "same", "number", "of", "values", "." ]
train
https://github.com/InformaticsMatters/pipelines-utils/blob/058aa6eceeff28c4ae402f6f58c58720bff0298e/src/python/pipelines_utils/parameter_utils.py#L59-L73
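Note that expandParameters, as recorded above, passes args[1] (the whole second positional tuple) as the symbolic name for every argument; the docstring suggests each tuple's own name was intended. A hedged corrected sketch, not the mined code:
def expand_parameters_sketch(*args):
    # hypothetical variant: expand each (values, name) tuple with its own symbolic name,
    # relying on the expandValues helper recorded below
    count = max([len(values) for values, _name in args] + [1])
    return tuple(expandValues(values, count, name) for values, name in args)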
InformaticsMatters/pipelines-utils
src/python/pipelines_utils/parameter_utils.py
expandValues
def expandValues(inputs, count, name): """Returns the input list with the length of `count`. If the list is [1] and the count is 3. [1,1,1] is returned. The list must be the count length or 1. Normally called from `expandParameters()` where `name` is the symbolic name of the input. """ if len(inputs) == count: expanded = inputs elif len(inputs) == 1: expanded = inputs * count else: raise ValueError('Incompatible number of values for ' + name) return expanded
python
def expandValues(inputs, count, name): """Returns the input list with the length of `count`. If the list is [1] and the count is 3. [1,1,1] is returned. The list must be the count length or 1. Normally called from `expandParameters()` where `name` is the symbolic name of the input. """ if len(inputs) == count: expanded = inputs elif len(inputs) == 1: expanded = inputs * count else: raise ValueError('Incompatible number of values for ' + name) return expanded
[ "def", "expandValues", "(", "inputs", ",", "count", ",", "name", ")", ":", "if", "len", "(", "inputs", ")", "==", "count", ":", "expanded", "=", "inputs", "elif", "len", "(", "inputs", ")", "==", "1", ":", "expanded", "=", "inputs", "*", "count", "else", ":", "raise", "ValueError", "(", "'Incompatible number of values for '", "+", "name", ")", "return", "expanded" ]
Returns the input list with the length of `count`. If the list is [1] and the count is 3. [1,1,1] is returned. The list must be the count length or 1. Normally called from `expandParameters()` where `name` is the symbolic name of the input.
[ "Returns", "the", "input", "list", "with", "the", "length", "of", "count", ".", "If", "the", "list", "is", "[", "1", "]", "and", "the", "count", "is", "3", ".", "[", "1", "1", "1", "]", "is", "returned", ".", "The", "list", "must", "be", "the", "count", "length", "or", "1", ".", "Normally", "called", "from", "expandParameters", "()", "where", "name", "is", "the", "symbolic", "name", "of", "the", "input", "." ]
train
https://github.com/InformaticsMatters/pipelines-utils/blob/058aa6eceeff28c4ae402f6f58c58720bff0298e/src/python/pipelines_utils/parameter_utils.py#L76-L88
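Behaviour of expandValues on a few example inputs, for illustration:
expandValues([7.5], 3, 'steps')         # -> [7.5, 7.5, 7.5]
expandValues([1.0, 2.0], 2, 'charges')  # -> [1.0, 2.0] (already the requested length)
expandValues([1.0, 2.0], 3, 'charges')  # raises ValueError('Incompatible number of values for charges')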
kajala/django-jutil
jutil/request.py
get_geo_ip
def get_geo_ip(ip: str, exceptions: bool=False, timeout: int=10) -> dict: """ Returns geo IP info or empty dict if geoip query fails at http://ipstack.com. requires settings.IPSTACK_TOKEN set as valid access token to the API. Example replies: {'country_name': 'United States', 'country_code': 'US', 'region_code': 'TX', 'region_name': 'Texas', 'ip': '76.184.236.184', 'latitude': 33.1507, 'time_zone': 'America/Chicago', 'metro_code': 623, 'city': 'Frisco', 'longitude': -96.8236, 'zip_code': '75033'} {'latitude': 60.1641, 'country_name': 'Finland', 'zip_code': '02920', 'region_name': 'Uusimaa', 'city': 'Espoo', 'metro_code': 0, 'ip': '194.100.27.41', 'time_zone': 'Europe/Helsinki', 'country_code': 'FI', 'longitude': 24.7136, 'region_code': '18'} :param ip: str :param exceptions: if True raises Exception on failure :param timeout: timeout in seconds :return: dict """ import requests import traceback try: res = requests.get('http://api.ipstack.com/{}?access_key={}&format=1'.format(ip, settings.IPSTACK_TOKEN), timeout=timeout) if res.status_code != 200: if exceptions: raise Exception('api.ipstack.com HTTP {}'.format(res.status_code)) return {} return res.json() except Exception as e: msg = 'geoip({}) failed: {}'.format(ip, traceback.format_exc()) logger.error(msg) if exceptions: raise return {}
python
def get_geo_ip(ip: str, exceptions: bool=False, timeout: int=10) -> dict: """ Returns geo IP info or empty dict if geoip query fails at http://ipstack.com. requires settings.IPSTACK_TOKEN set as valid access token to the API. Example replies: {'country_name': 'United States', 'country_code': 'US', 'region_code': 'TX', 'region_name': 'Texas', 'ip': '76.184.236.184', 'latitude': 33.1507, 'time_zone': 'America/Chicago', 'metro_code': 623, 'city': 'Frisco', 'longitude': -96.8236, 'zip_code': '75033'} {'latitude': 60.1641, 'country_name': 'Finland', 'zip_code': '02920', 'region_name': 'Uusimaa', 'city': 'Espoo', 'metro_code': 0, 'ip': '194.100.27.41', 'time_zone': 'Europe/Helsinki', 'country_code': 'FI', 'longitude': 24.7136, 'region_code': '18'} :param ip: str :param exceptions: if True raises Exception on failure :param timeout: timeout in seconds :return: dict """ import requests import traceback try: res = requests.get('http://api.ipstack.com/{}?access_key={}&format=1'.format(ip, settings.IPSTACK_TOKEN), timeout=timeout) if res.status_code != 200: if exceptions: raise Exception('api.ipstack.com HTTP {}'.format(res.status_code)) return {} return res.json() except Exception as e: msg = 'geoip({}) failed: {}'.format(ip, traceback.format_exc()) logger.error(msg) if exceptions: raise return {}
[ "def", "get_geo_ip", "(", "ip", ":", "str", ",", "exceptions", ":", "bool", "=", "False", ",", "timeout", ":", "int", "=", "10", ")", "->", "dict", ":", "import", "requests", "import", "traceback", "try", ":", "res", "=", "requests", ".", "get", "(", "'http://api.ipstack.com/{}?access_key={}&format=1'", ".", "format", "(", "ip", ",", "settings", ".", "IPSTACK_TOKEN", ")", ",", "timeout", "=", "timeout", ")", "if", "res", ".", "status_code", "!=", "200", ":", "if", "exceptions", ":", "raise", "Exception", "(", "'api.ipstack.com HTTP {}'", ".", "format", "(", "res", ".", "status_code", ")", ")", "return", "{", "}", "return", "res", ".", "json", "(", ")", "except", "Exception", "as", "e", ":", "msg", "=", "'geoip({}) failed: {}'", ".", "format", "(", "ip", ",", "traceback", ".", "format_exc", "(", ")", ")", "logger", ".", "error", "(", "msg", ")", "if", "exceptions", ":", "raise", "return", "{", "}" ]
Returns geo IP info or empty dict if geoip query fails at http://ipstack.com. requires settings.IPSTACK_TOKEN set as valid access token to the API. Example replies: {'country_name': 'United States', 'country_code': 'US', 'region_code': 'TX', 'region_name': 'Texas', 'ip': '76.184.236.184', 'latitude': 33.1507, 'time_zone': 'America/Chicago', 'metro_code': 623, 'city': 'Frisco', 'longitude': -96.8236, 'zip_code': '75033'} {'latitude': 60.1641, 'country_name': 'Finland', 'zip_code': '02920', 'region_name': 'Uusimaa', 'city': 'Espoo', 'metro_code': 0, 'ip': '194.100.27.41', 'time_zone': 'Europe/Helsinki', 'country_code': 'FI', 'longitude': 24.7136, 'region_code': '18'} :param ip: str :param exceptions: if True raises Exception on failure :param timeout: timeout in seconds :return: dict
[ "Returns", "geo", "IP", "info", "or", "empty", "dict", "if", "geoip", "query", "fails", "at", "http", ":", "//", "ipstack", ".", "com", ".", "requires", "settings", ".", "IPSTACK_TOKEN", "set", "as", "valid", "access", "token", "to", "the", "API", "." ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/request.py#L8-L36
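A usage sketch for get_geo_ip, assuming settings.IPSTACK_TOKEN holds a valid ipstack.com access key; the IP address is an example value taken from the docstring above:
from jutil.request import get_geo_ip

info = get_geo_ip('194.100.27.41')        # returns {} if the lookup fails and exceptions=False
country = info.get('country_code', '')    # e.g. 'FI'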
kajala/django-jutil
jutil/request.py
get_ip_info
def get_ip_info(ip: str, exceptions: bool=False, timeout: int=10) -> tuple: """ Returns (ip, country_code, host) tuple of the IP address. :param ip: IP address :param exceptions: Raise Exception or not :param timeout: Timeout in seconds. Note that timeout only affects geo IP part, not getting host name. :return: (ip, country_code, host) """ import traceback import socket if not ip: # localhost return None, '', '' host = '' country_code = get_geo_ip(ip, exceptions=exceptions, timeout=timeout).get('country_code', '') try: res = socket.gethostbyaddr(ip) host = res[0][:255] if ip else '' except Exception as e: msg = 'socket.gethostbyaddr({}) failed: {}'.format(ip, traceback.format_exc()) logger.error(msg) if exceptions: raise e return ip, country_code, host
python
def get_ip_info(ip: str, exceptions: bool=False, timeout: int=10) -> tuple: """ Returns (ip, country_code, host) tuple of the IP address. :param ip: IP address :param exceptions: Raise Exception or not :param timeout: Timeout in seconds. Note that timeout only affects geo IP part, not getting host name. :return: (ip, country_code, host) """ import traceback import socket if not ip: # localhost return None, '', '' host = '' country_code = get_geo_ip(ip, exceptions=exceptions, timeout=timeout).get('country_code', '') try: res = socket.gethostbyaddr(ip) host = res[0][:255] if ip else '' except Exception as e: msg = 'socket.gethostbyaddr({}) failed: {}'.format(ip, traceback.format_exc()) logger.error(msg) if exceptions: raise e return ip, country_code, host
[ "def", "get_ip_info", "(", "ip", ":", "str", ",", "exceptions", ":", "bool", "=", "False", ",", "timeout", ":", "int", "=", "10", ")", "->", "tuple", ":", "import", "traceback", "import", "socket", "if", "not", "ip", ":", "# localhost", "return", "None", ",", "''", ",", "''", "host", "=", "''", "country_code", "=", "get_geo_ip", "(", "ip", ",", "exceptions", "=", "exceptions", ",", "timeout", "=", "timeout", ")", ".", "get", "(", "'country_code'", ",", "''", ")", "try", ":", "res", "=", "socket", ".", "gethostbyaddr", "(", "ip", ")", "host", "=", "res", "[", "0", "]", "[", ":", "255", "]", "if", "ip", "else", "''", "except", "Exception", "as", "e", ":", "msg", "=", "'socket.gethostbyaddr({}) failed: {}'", ".", "format", "(", "ip", ",", "traceback", ".", "format_exc", "(", ")", ")", "logger", ".", "error", "(", "msg", ")", "if", "exceptions", ":", "raise", "e", "return", "ip", ",", "country_code", ",", "host" ]
Returns (ip, country_code, host) tuple of the IP address. :param ip: IP address :param exceptions: Raise Exception or not :param timeout: Timeout in seconds. Note that timeout only affects geo IP part, not getting host name. :return: (ip, country_code, host)
[ "Returns", "(", "ip", "country_code", "host", ")", "tuple", "of", "the", "IP", "address", ".", ":", "param", "ip", ":", "IP", "address", ":", "param", "exceptions", ":", "Raise", "Exception", "or", "not", ":", "param", "timeout", ":", "Timeout", "in", "seconds", ".", "Note", "that", "timeout", "only", "affects", "geo", "IP", "part", "not", "getting", "host", "name", ".", ":", "return", ":", "(", "ip", "country_code", "host", ")" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/request.py#L39-L61
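get_ip_info combines the geo lookup above with a reverse DNS query; a minimal sketch with an example address:
from jutil.request import get_ip_info

ip, country_code, host = get_ip_info('194.100.27.41', timeout=5)
# ip is echoed back (or None for an empty/localhost input); host may be '' if reverse DNS fails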
cons3rt/pycons3rt
pycons3rt/awsapi/metadata.py
is_aws
def is_aws(): """Determines if this system is on AWS :return: bool True if this system is running on AWS """ log = logging.getLogger(mod_logger + '.is_aws') log.info('Querying AWS meta data URL: {u}'.format(u=metadata_url)) # Re-try logic for checking the AWS meta data URL retry_time_sec = 10 max_num_tries = 10 attempt_num = 1 while True: if attempt_num > max_num_tries: log.info('Unable to query the AWS meta data URL, this system is NOT running on AWS\n{e}') return False # Query the AWS meta data URL try: response = urllib.urlopen(metadata_url) except(IOError, OSError) as ex: log.warn('Failed to query the AWS meta data URL\n{e}'.format(e=str(ex))) attempt_num += 1 time.sleep(retry_time_sec) continue # Check the code if response.getcode() == 200: log.info('AWS metadata service returned code 200, this system is running on AWS') return True else: log.warn('AWS metadata service returned code: {c}'.format(c=response.getcode())) attempt_num += 1 time.sleep(retry_time_sec) continue
python
def is_aws(): """Determines if this system is on AWS :return: bool True if this system is running on AWS """ log = logging.getLogger(mod_logger + '.is_aws') log.info('Querying AWS meta data URL: {u}'.format(u=metadata_url)) # Re-try logic for checking the AWS meta data URL retry_time_sec = 10 max_num_tries = 10 attempt_num = 1 while True: if attempt_num > max_num_tries: log.info('Unable to query the AWS meta data URL, this system is NOT running on AWS\n{e}') return False # Query the AWS meta data URL try: response = urllib.urlopen(metadata_url) except(IOError, OSError) as ex: log.warn('Failed to query the AWS meta data URL\n{e}'.format(e=str(ex))) attempt_num += 1 time.sleep(retry_time_sec) continue # Check the code if response.getcode() == 200: log.info('AWS metadata service returned code 200, this system is running on AWS') return True else: log.warn('AWS metadata service returned code: {c}'.format(c=response.getcode())) attempt_num += 1 time.sleep(retry_time_sec) continue
[ "def", "is_aws", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.is_aws'", ")", "log", ".", "info", "(", "'Querying AWS meta data URL: {u}'", ".", "format", "(", "u", "=", "metadata_url", ")", ")", "# Re-try logic for checking the AWS meta data URL", "retry_time_sec", "=", "10", "max_num_tries", "=", "10", "attempt_num", "=", "1", "while", "True", ":", "if", "attempt_num", ">", "max_num_tries", ":", "log", ".", "info", "(", "'Unable to query the AWS meta data URL, this system is NOT running on AWS\\n{e}'", ")", "return", "False", "# Query the AWS meta data URL", "try", ":", "response", "=", "urllib", ".", "urlopen", "(", "metadata_url", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "ex", ":", "log", ".", "warn", "(", "'Failed to query the AWS meta data URL\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", ")", "attempt_num", "+=", "1", "time", ".", "sleep", "(", "retry_time_sec", ")", "continue", "# Check the code", "if", "response", ".", "getcode", "(", ")", "==", "200", ":", "log", ".", "info", "(", "'AWS metadata service returned code 200, this system is running on AWS'", ")", "return", "True", "else", ":", "log", ".", "warn", "(", "'AWS metadata service returned code: {c}'", ".", "format", "(", "c", "=", "response", ".", "getcode", "(", ")", ")", ")", "attempt_num", "+=", "1", "time", ".", "sleep", "(", "retry_time_sec", ")", "continue" ]
Determines if this system is on AWS :return: bool True if this system is running on AWS
[ "Determines", "if", "this", "system", "is", "on", "AWS" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/metadata.py#L44-L79
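A hedged sketch of using is_aws as a guard around AWS-only logic, assuming the package layout matches the path above; note that the function retries the metadata URL up to 10 times with a 10-second sleep, so a negative answer on a non-AWS host can take well over a minute:

from pycons3rt.awsapi.metadata import is_aws

def configure_host():
    # Only attempt metadata-driven configuration when the EC2 metadata
    # service is actually reachable from this host.
    if not is_aws():
        print('Not on AWS, skipping cloud-specific configuration')
        return
    print('Running on AWS, continuing with metadata queries')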
cons3rt/pycons3rt
pycons3rt/awsapi/metadata.py
get_instance_id
def get_instance_id(): """Gets the instance ID of this EC2 instance :return: String instance ID or None """ log = logging.getLogger(mod_logger + '.get_instance_id') # Exit if not running on AWS if not is_aws(): log.info('This machine is not running in AWS, exiting...') return instance_id_url = metadata_url + 'instance-id' try: response = urllib.urlopen(instance_id_url) except(IOError, OSError) as ex: msg = 'Unable to query URL to get instance ID: {u}\n{e}'. \ format(u=instance_id_url, e=ex) log.error(msg) return # Check the code if response.getcode() != 200: msg = 'There was a problem querying url: {u}, returned code: {c}, unable to get the instance-id'.format( u=instance_id_url, c=response.getcode()) log.error(msg) return instance_id = response.read() return instance_id
python
def get_instance_id(): """Gets the instance ID of this EC2 instance :return: String instance ID or None """ log = logging.getLogger(mod_logger + '.get_instance_id') # Exit if not running on AWS if not is_aws(): log.info('This machine is not running in AWS, exiting...') return instance_id_url = metadata_url + 'instance-id' try: response = urllib.urlopen(instance_id_url) except(IOError, OSError) as ex: msg = 'Unable to query URL to get instance ID: {u}\n{e}'. \ format(u=instance_id_url, e=ex) log.error(msg) return # Check the code if response.getcode() != 200: msg = 'There was a problem querying url: {u}, returned code: {c}, unable to get the instance-id'.format( u=instance_id_url, c=response.getcode()) log.error(msg) return instance_id = response.read() return instance_id
[ "def", "get_instance_id", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_instance_id'", ")", "# Exit if not running on AWS", "if", "not", "is_aws", "(", ")", ":", "log", ".", "info", "(", "'This machine is not running in AWS, exiting...'", ")", "return", "instance_id_url", "=", "metadata_url", "+", "'instance-id'", "try", ":", "response", "=", "urllib", ".", "urlopen", "(", "instance_id_url", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "ex", ":", "msg", "=", "'Unable to query URL to get instance ID: {u}\\n{e}'", ".", "format", "(", "u", "=", "instance_id_url", ",", "e", "=", "ex", ")", "log", ".", "error", "(", "msg", ")", "return", "# Check the code", "if", "response", ".", "getcode", "(", ")", "!=", "200", ":", "msg", "=", "'There was a problem querying url: {u}, returned code: {c}, unable to get the instance-id'", ".", "format", "(", "u", "=", "instance_id_url", ",", "c", "=", "response", ".", "getcode", "(", ")", ")", "log", ".", "error", "(", "msg", ")", "return", "instance_id", "=", "response", ".", "read", "(", ")", "return", "instance_id" ]
Gets the instance ID of this EC2 instance :return: String instance ID or None
[ "Gets", "the", "instance", "ID", "of", "this", "EC2", "instance" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/metadata.py#L82-L110
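A short caller sketch for get_instance_id; it returns None both when the host is not on AWS and when the metadata query fails, so the missing case has to be handled explicitly:

from pycons3rt.awsapi.metadata import get_instance_id

instance_id = get_instance_id()
if instance_id is None:
    print('Could not determine the EC2 instance ID')
else:
    print('This instance is: {i}'.format(i=instance_id))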
cons3rt/pycons3rt
pycons3rt/awsapi/metadata.py
get_vpc_id_from_mac_address
def get_vpc_id_from_mac_address(): """Gets the VPC ID for this EC2 instance :return: String instance ID or None """ log = logging.getLogger(mod_logger + '.get_vpc_id') # Exit if not running on AWS if not is_aws(): log.info('This machine is not running in AWS, exiting...') return # Get the primary interface MAC address to query the meta data service log.debug('Attempting to determine the primary interface MAC address...') try: mac_address = get_primary_mac_address() except AWSMetaDataError: _, ex, trace = sys.exc_info() msg = '{n}: Unable to determine the mac address, cannot determine VPC ID:\n{e}'.format( n=ex.__class__.__name__, e=str(ex)) log.error(msg) return vpc_id_url = metadata_url + 'network/interfaces/macs/' + mac_address + '/vpc-id' try: response = urllib.urlopen(vpc_id_url) except(IOError, OSError) as ex: msg = 'Unable to query URL to get VPC ID: {u}\n{e}'.format(u=vpc_id_url, e=ex) log.error(msg) return # Check the code if response.getcode() != 200: msg = 'There was a problem querying url: {u}, returned code: {c}, unable to get the vpc-id'.format( u=vpc_id_url, c=response.getcode()) log.error(msg) return vpc_id = response.read() return vpc_id
python
def get_vpc_id_from_mac_address(): """Gets the VPC ID for this EC2 instance :return: String instance ID or None """ log = logging.getLogger(mod_logger + '.get_vpc_id') # Exit if not running on AWS if not is_aws(): log.info('This machine is not running in AWS, exiting...') return # Get the primary interface MAC address to query the meta data service log.debug('Attempting to determine the primary interface MAC address...') try: mac_address = get_primary_mac_address() except AWSMetaDataError: _, ex, trace = sys.exc_info() msg = '{n}: Unable to determine the mac address, cannot determine VPC ID:\n{e}'.format( n=ex.__class__.__name__, e=str(ex)) log.error(msg) return vpc_id_url = metadata_url + 'network/interfaces/macs/' + mac_address + '/vpc-id' try: response = urllib.urlopen(vpc_id_url) except(IOError, OSError) as ex: msg = 'Unable to query URL to get VPC ID: {u}\n{e}'.format(u=vpc_id_url, e=ex) log.error(msg) return # Check the code if response.getcode() != 200: msg = 'There was a problem querying url: {u}, returned code: {c}, unable to get the vpc-id'.format( u=vpc_id_url, c=response.getcode()) log.error(msg) return vpc_id = response.read() return vpc_id
[ "def", "get_vpc_id_from_mac_address", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_vpc_id'", ")", "# Exit if not running on AWS", "if", "not", "is_aws", "(", ")", ":", "log", ".", "info", "(", "'This machine is not running in AWS, exiting...'", ")", "return", "# Get the primary interface MAC address to query the meta data service", "log", ".", "debug", "(", "'Attempting to determine the primary interface MAC address...'", ")", "try", ":", "mac_address", "=", "get_primary_mac_address", "(", ")", "except", "AWSMetaDataError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'{n}: Unable to determine the mac address, cannot determine VPC ID:\\n{e}'", ".", "format", "(", "n", "=", "ex", ".", "__class__", ".", "__name__", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "return", "vpc_id_url", "=", "metadata_url", "+", "'network/interfaces/macs/'", "+", "mac_address", "+", "'/vpc-id'", "try", ":", "response", "=", "urllib", ".", "urlopen", "(", "vpc_id_url", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "ex", ":", "msg", "=", "'Unable to query URL to get VPC ID: {u}\\n{e}'", ".", "format", "(", "u", "=", "vpc_id_url", ",", "e", "=", "ex", ")", "log", ".", "error", "(", "msg", ")", "return", "# Check the code", "if", "response", ".", "getcode", "(", ")", "!=", "200", ":", "msg", "=", "'There was a problem querying url: {u}, returned code: {c}, unable to get the vpc-id'", ".", "format", "(", "u", "=", "vpc_id_url", ",", "c", "=", "response", ".", "getcode", "(", ")", ")", "log", ".", "error", "(", "msg", ")", "return", "vpc_id", "=", "response", ".", "read", "(", ")", "return", "vpc_id" ]
Gets the VPC ID for this EC2 instance :return: String instance ID or None
[ "Gets", "the", "VPC", "ID", "for", "this", "EC2", "instance" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/metadata.py#L113-L151
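get_vpc_id_from_mac_address resolves the VPC via the eth0 MAC address, so it only works when the primary interface is actually named eth0; note also that the docstring's ":return:" line says "instance ID" although the function returns the VPC ID string or None. A hedged usage sketch:

from pycons3rt.awsapi.metadata import get_vpc_id_from_mac_address

vpc_id = get_vpc_id_from_mac_address()
if vpc_id:
    print('Instance lives in VPC: {v}'.format(v=vpc_id))
else:
    # None is returned off AWS, when eth0 cannot be resolved,
    # or when the metadata query fails.
    print('VPC ID could not be determined')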
cons3rt/pycons3rt
pycons3rt/awsapi/metadata.py
get_availability_zone
def get_availability_zone(): """Gets the AWS Availability Zone ID for this system :return: (str) Availability Zone ID where this system lives """ log = logging.getLogger(mod_logger + '.get_availability_zone') # Exit if not running on AWS if not is_aws(): log.info('This machine is not running in AWS, exiting...') return availability_zone_url = metadata_url + 'placement/availability-zone' try: response = urllib.urlopen(availability_zone_url) except(IOError, OSError) as ex: msg = 'Unable to query URL to get Availability Zone: {u}\n{e}'.format(u=availability_zone_url, e=ex) log.error(msg) return # Check the code if response.getcode() != 200: msg = 'There was a problem querying url: {u}, returned code: {c}, unable to get the Availability Zone'.format( u=availability_zone_url, c=response.getcode()) log.error(msg) return availability_zone = response.read() return availability_zone
python
def get_availability_zone(): """Gets the AWS Availability Zone ID for this system :return: (str) Availability Zone ID where this system lives """ log = logging.getLogger(mod_logger + '.get_availability_zone') # Exit if not running on AWS if not is_aws(): log.info('This machine is not running in AWS, exiting...') return availability_zone_url = metadata_url + 'placement/availability-zone' try: response = urllib.urlopen(availability_zone_url) except(IOError, OSError) as ex: msg = 'Unable to query URL to get Availability Zone: {u}\n{e}'.format(u=availability_zone_url, e=ex) log.error(msg) return # Check the code if response.getcode() != 200: msg = 'There was a problem querying url: {u}, returned code: {c}, unable to get the Availability Zone'.format( u=availability_zone_url, c=response.getcode()) log.error(msg) return availability_zone = response.read() return availability_zone
[ "def", "get_availability_zone", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_availability_zone'", ")", "# Exit if not running on AWS", "if", "not", "is_aws", "(", ")", ":", "log", ".", "info", "(", "'This machine is not running in AWS, exiting...'", ")", "return", "availability_zone_url", "=", "metadata_url", "+", "'placement/availability-zone'", "try", ":", "response", "=", "urllib", ".", "urlopen", "(", "availability_zone_url", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "ex", ":", "msg", "=", "'Unable to query URL to get Availability Zone: {u}\\n{e}'", ".", "format", "(", "u", "=", "availability_zone_url", ",", "e", "=", "ex", ")", "log", ".", "error", "(", "msg", ")", "return", "# Check the code", "if", "response", ".", "getcode", "(", ")", "!=", "200", ":", "msg", "=", "'There was a problem querying url: {u}, returned code: {c}, unable to get the Availability Zone'", ".", "format", "(", "u", "=", "availability_zone_url", ",", "c", "=", "response", ".", "getcode", "(", ")", ")", "log", ".", "error", "(", "msg", ")", "return", "availability_zone", "=", "response", ".", "read", "(", ")", "return", "availability_zone" ]
Gets the AWS Availability Zone ID for this system :return: (str) Availability Zone ID where this system lives
[ "Gets", "the", "AWS", "Availability", "Zone", "ID", "for", "this", "system" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/metadata.py#L195-L222
cons3rt/pycons3rt
pycons3rt/awsapi/metadata.py
get_region
def get_region(): """Gets the AWS Region ID for this system :return: (str) AWS Region ID where this system lives """ log = logging.getLogger(mod_logger + '.get_region') # First get the availability zone availability_zone = get_availability_zone() if availability_zone is None: msg = 'Unable to determine the Availability Zone for this system, cannot determine the AWS Region' log.error(msg) return # Strip of the last character to get the region region = availability_zone[:-1] return region
python
def get_region(): """Gets the AWS Region ID for this system :return: (str) AWS Region ID where this system lives """ log = logging.getLogger(mod_logger + '.get_region') # First get the availability zone availability_zone = get_availability_zone() if availability_zone is None: msg = 'Unable to determine the Availability Zone for this system, cannot determine the AWS Region' log.error(msg) return # Strip of the last character to get the region region = availability_zone[:-1] return region
[ "def", "get_region", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_region'", ")", "# First get the availability zone", "availability_zone", "=", "get_availability_zone", "(", ")", "if", "availability_zone", "is", "None", ":", "msg", "=", "'Unable to determine the Availability Zone for this system, cannot determine the AWS Region'", "log", ".", "error", "(", "msg", ")", "return", "# Strip of the last character to get the region", "region", "=", "availability_zone", "[", ":", "-", "1", "]", "return", "region" ]
Gets the AWS Region ID for this system :return: (str) AWS Region ID where this system lives
[ "Gets", "the", "AWS", "Region", "ID", "for", "this", "system" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/metadata.py#L225-L242
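get_region simply drops the trailing zone letter from the Availability Zone string, so the two values are always related as in this illustrative sketch (the zone shown in the comments is an assumption, not a queried value):

from pycons3rt.awsapi.metadata import get_availability_zone, get_region

# On an instance in zone 'us-east-1a', get_availability_zone() returns
# 'us-east-1a' and get_region() strips the final character to give 'us-east-1'.
az = get_availability_zone()      # e.g. 'us-east-1a'
region = get_region()             # e.g. 'us-east-1'
print(az, region)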
cons3rt/pycons3rt
pycons3rt/awsapi/metadata.py
get_primary_mac_address
def get_primary_mac_address(): """Determines the MAC address to use for querying the AWS meta data service for network related queries :return: (str) MAC address for the eth0 interface :raises: AWSMetaDataError """ log = logging.getLogger(mod_logger + '.get_primary_mac_address') log.debug('Attempting to determine the MAC address for eth0...') try: mac_address = netifaces.ifaddresses('eth0')[netifaces.AF_LINK][0]['addr'] except Exception: _, ex, trace = sys.exc_info() msg = '{n}: Unable to determine the eth0 mac address for this system:\n{e}'.format( n=ex.__class__.__name__, e=str(ex)) raise AWSMetaDataError, msg, trace return mac_address
python
def get_primary_mac_address(): """Determines the MAC address to use for querying the AWS meta data service for network related queries :return: (str) MAC address for the eth0 interface :raises: AWSMetaDataError """ log = logging.getLogger(mod_logger + '.get_primary_mac_address') log.debug('Attempting to determine the MAC address for eth0...') try: mac_address = netifaces.ifaddresses('eth0')[netifaces.AF_LINK][0]['addr'] except Exception: _, ex, trace = sys.exc_info() msg = '{n}: Unable to determine the eth0 mac address for this system:\n{e}'.format( n=ex.__class__.__name__, e=str(ex)) raise AWSMetaDataError, msg, trace return mac_address
[ "def", "get_primary_mac_address", "(", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_primary_mac_address'", ")", "log", ".", "debug", "(", "'Attempting to determine the MAC address for eth0...'", ")", "try", ":", "mac_address", "=", "netifaces", ".", "ifaddresses", "(", "'eth0'", ")", "[", "netifaces", ".", "AF_LINK", "]", "[", "0", "]", "[", "'addr'", "]", "except", "Exception", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'{n}: Unable to determine the eth0 mac address for this system:\\n{e}'", ".", "format", "(", "n", "=", "ex", ".", "__class__", ".", "__name__", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "AWSMetaDataError", ",", "msg", ",", "trace", "return", "mac_address" ]
Determines the MAC address to use for querying the AWS meta data service for network related queries :return: (str) MAC address for the eth0 interface :raises: AWSMetaDataError
[ "Determines", "the", "MAC", "address", "to", "use", "for", "querying", "the", "AWS", "meta", "data", "service", "for", "network", "related", "queries" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/metadata.py#L245-L261
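get_primary_mac_address wraps netifaces and uses the Python 2 three-argument raise form (raise AWSMetaDataError, msg, trace), so the module as written targets Python 2. A hedged caller sketch, assuming AWSMetaDataError is importable from the same module where it is raised:

from pycons3rt.awsapi.metadata import get_primary_mac_address, AWSMetaDataError

try:
    mac = get_primary_mac_address()
    print('eth0 MAC address: {m}'.format(m=mac))
except AWSMetaDataError as ex:
    # Raised when netifaces cannot resolve an eth0 interface on this host.
    print('Unable to determine the primary MAC address: {e}'.format(e=str(ex)))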
MarcAureleCoste/sqla-filters
src/sqla_filters/parser/base/base_sqla_parser.py
BaseSqlaParser.attr_sep
def attr_sep(self, new_sep: str) -> None: """Set the new value for the attribute separator. When the new value is assigned a new tree is generated. """ self._attr_sep = new_sep self._filters_tree = self._generate_filters_tree()
python
def attr_sep(self, new_sep: str) -> None: """Set the new value for the attribute separator. When the new value is assigned a new tree is generated. """ self._attr_sep = new_sep self._filters_tree = self._generate_filters_tree()
[ "def", "attr_sep", "(", "self", ",", "new_sep", ":", "str", ")", "->", "None", ":", "self", ".", "_attr_sep", "=", "new_sep", "self", ".", "_filters_tree", "=", "self", ".", "_generate_filters_tree", "(", ")" ]
Set the new value for the attribute separator. When the new value is assigned a new tree is generated.
[ "Set", "the", "new", "value", "for", "the", "attribute", "separator", ".", "When", "the", "new", "value", "is", "assigned", "a", "new", "tree", "is", "generated", "." ]
train
https://github.com/MarcAureleCoste/sqla-filters/blob/a9596f660a11d275bf0e831ecd9e502b0af2a087/src/sqla_filters/parser/base/base_sqla_parser.py#L23-L29
kajala/django-jutil
jutil/urls.py
url_equals
def url_equals(a: str, b: str) -> bool: """ Compares two URLs/paths and returns True if they point to same URI. For example, querystring parameters can be different order but URLs are still equal. :param a: URL/path :param b: URL/path :return: True if URLs/paths are equal """ from urllib.parse import urlparse, parse_qsl a2 = list(urlparse(a)) b2 = list(urlparse(b)) a2[4] = dict(parse_qsl(a2[4])) b2[4] = dict(parse_qsl(b2[4])) return a2 == b2
python
def url_equals(a: str, b: str) -> bool: """ Compares two URLs/paths and returns True if they point to same URI. For example, querystring parameters can be different order but URLs are still equal. :param a: URL/path :param b: URL/path :return: True if URLs/paths are equal """ from urllib.parse import urlparse, parse_qsl a2 = list(urlparse(a)) b2 = list(urlparse(b)) a2[4] = dict(parse_qsl(a2[4])) b2[4] = dict(parse_qsl(b2[4])) return a2 == b2
[ "def", "url_equals", "(", "a", ":", "str", ",", "b", ":", "str", ")", "->", "bool", ":", "from", "urllib", ".", "parse", "import", "urlparse", ",", "parse_qsl", "a2", "=", "list", "(", "urlparse", "(", "a", ")", ")", "b2", "=", "list", "(", "urlparse", "(", "b", ")", ")", "a2", "[", "4", "]", "=", "dict", "(", "parse_qsl", "(", "a2", "[", "4", "]", ")", ")", "b2", "[", "4", "]", "=", "dict", "(", "parse_qsl", "(", "b2", "[", "4", "]", ")", ")", "return", "a2", "==", "b2" ]
Compares two URLs/paths and returns True if they point to same URI. For example, querystring parameters can be different order but URLs are still equal. :param a: URL/path :param b: URL/path :return: True if URLs/paths are equal
[ "Compares", "two", "URLs", "/", "paths", "and", "returns", "True", "if", "they", "point", "to", "same", "URI", ".", "For", "example", "querystring", "parameters", "can", "be", "different", "order", "but", "URLs", "are", "still", "equal", ".", ":", "param", "a", ":", "URL", "/", "path", ":", "param", "b", ":", "URL", "/", "path", ":", "return", ":", "True", "if", "URLs", "/", "paths", "are", "equal" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/urls.py#L1-L14
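A quick check of url_equals behaviour: query-string parameters in a different order still compare equal, while a genuinely different value does not:

from jutil.urls import url_equals

# Same URI, parameters reordered -> True
print(url_equals('/path?a=1&b=2', '/path?b=2&a=1'))   # True

# Different parameter value -> False
print(url_equals('/path?a=1', '/path?a=2'))           # False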
kajala/django-jutil
jutil/urls.py
url_mod
def url_mod(url: str, new_params: dict) -> str: """ Modifies existing URL by setting/overriding specified query string parameters. Note: Does not support multiple querystring parameters with identical name. :param url: Base URL/path to modify :param new_params: Querystring parameters to set/override (dict) :return: New URL/path """ from urllib.parse import urlparse, parse_qsl, urlunparse, urlencode res = urlparse(url) query_params = dict(parse_qsl(res.query)) for k, v in new_params.items(): if v is None: query_params[str(k)] = '' else: query_params[str(k)] = str(v) parts = list(res) parts[4] = urlencode(query_params) return urlunparse(parts)
python
def url_mod(url: str, new_params: dict) -> str: """ Modifies existing URL by setting/overriding specified query string parameters. Note: Does not support multiple querystring parameters with identical name. :param url: Base URL/path to modify :param new_params: Querystring parameters to set/override (dict) :return: New URL/path """ from urllib.parse import urlparse, parse_qsl, urlunparse, urlencode res = urlparse(url) query_params = dict(parse_qsl(res.query)) for k, v in new_params.items(): if v is None: query_params[str(k)] = '' else: query_params[str(k)] = str(v) parts = list(res) parts[4] = urlencode(query_params) return urlunparse(parts)
[ "def", "url_mod", "(", "url", ":", "str", ",", "new_params", ":", "dict", ")", "->", "str", ":", "from", "urllib", ".", "parse", "import", "urlparse", ",", "parse_qsl", ",", "urlunparse", ",", "urlencode", "res", "=", "urlparse", "(", "url", ")", "query_params", "=", "dict", "(", "parse_qsl", "(", "res", ".", "query", ")", ")", "for", "k", ",", "v", "in", "new_params", ".", "items", "(", ")", ":", "if", "v", "is", "None", ":", "query_params", "[", "str", "(", "k", ")", "]", "=", "''", "else", ":", "query_params", "[", "str", "(", "k", ")", "]", "=", "str", "(", "v", ")", "parts", "=", "list", "(", "res", ")", "parts", "[", "4", "]", "=", "urlencode", "(", "query_params", ")", "return", "urlunparse", "(", "parts", ")" ]
Modifies existing URL by setting/overriding specified query string parameters. Note: Does not support multiple querystring parameters with identical name. :param url: Base URL/path to modify :param new_params: Querystring parameters to set/override (dict) :return: New URL/path
[ "Modifies", "existing", "URL", "by", "setting", "/", "overriding", "specified", "query", "string", "parameters", ".", "Note", ":", "Does", "not", "support", "multiple", "querystring", "parameters", "with", "identical", "name", ".", ":", "param", "url", ":", "Base", "URL", "/", "path", "to", "modify", ":", "param", "new_params", ":", "Querystring", "parameters", "to", "set", "/", "override", "(", "dict", ")", ":", "return", ":", "New", "URL", "/", "path" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/urls.py#L17-L35
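A usage sketch for url_mod: existing parameters are overridden, new ones appended, and None values become empty strings; the encoded query-string order follows dict insertion order:

from jutil.urls import url_mod

# Override 'page' and add 'sort'; the rest of the URL is preserved.
print(url_mod('https://example.com/list?page=1&q=abc', {'page': 2, 'sort': 'name'}))
# -> https://example.com/list?page=2&q=abc&sort=name

# None becomes an empty value rather than being dropped.
print(url_mod('/list?page=1', {'page': None}))
# -> /list?page=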
kajala/django-jutil
jutil/urls.py
url_host
def url_host(url: str) -> str: """ Parses hostname from URL. :param url: URL :return: hostname """ from urllib.parse import urlparse res = urlparse(url) return res.netloc.split(':')[0] if res.netloc else ''
python
def url_host(url: str) -> str: """ Parses hostname from URL. :param url: URL :return: hostname """ from urllib.parse import urlparse res = urlparse(url) return res.netloc.split(':')[0] if res.netloc else ''
[ "def", "url_host", "(", "url", ":", "str", ")", "->", "str", ":", "from", "urllib", ".", "parse", "import", "urlparse", "res", "=", "urlparse", "(", "url", ")", "return", "res", ".", "netloc", ".", "split", "(", "':'", ")", "[", "0", "]", "if", "res", ".", "netloc", "else", "''" ]
Parses hostname from URL. :param url: URL :return: hostname
[ "Parses", "hostname", "from", "URL", ".", ":", "param", "url", ":", "URL", ":", "return", ":", "hostname" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/urls.py#L38-L46
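url_host drops the scheme, port and path, returning only the bare host name; a couple of illustrative calls:

from jutil.urls import url_host

print(url_host('https://example.com:8443/api/v1/'))  # 'example.com'
print(url_host('/relative/path'))                    # '' (no netloc to parse)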
Vital-Fernandez/dazer
bin/lib/CodeTools/various.py
vitools.ufloatDict_nominal
def ufloatDict_nominal(self, ufloat_dict): 'This gives us a dictionary of nominal values from a dictionary of uncertainties' return OrderedDict(izip(ufloat_dict.keys(), map(lambda x: x.nominal_value, ufloat_dict.values())))
python
def ufloatDict_nominal(self, ufloat_dict): 'This gives us a dictionary of nominal values from a dictionary of uncertainties' return OrderedDict(izip(ufloat_dict.keys(), map(lambda x: x.nominal_value, ufloat_dict.values())))
[ "def", "ufloatDict_nominal", "(", "self", ",", "ufloat_dict", ")", ":", "return", "OrderedDict", "(", "izip", "(", "ufloat_dict", ".", "keys", "(", ")", ",", "map", "(", "lambda", "x", ":", "x", ".", "nominal_value", ",", "ufloat_dict", ".", "values", "(", ")", ")", ")", ")" ]
This gives us a dictionary of nominal values from a dictionary of uncertainties
[ "This", "gives", "us", "a", "dictionary", "of", "nominal", "values", "from", "a", "dictionary", "of", "uncertainties" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/CodeTools/various.py#L75-L77
Vital-Fernandez/dazer
bin/lib/CodeTools/various.py
vitools.ufloatDict_stdev
def ufloatDict_stdev(self, ufloat_dict): 'This gives us a dictionary of nominal values from a dictionary of uncertainties' return OrderedDict(izip(ufloat_dict.keys(), map(lambda x: x.std_dev, ufloat_dict.values())))
python
def ufloatDict_stdev(self, ufloat_dict): 'This gives us a dictionary of nominal values from a dictionary of uncertainties' return OrderedDict(izip(ufloat_dict.keys(), map(lambda x: x.std_dev, ufloat_dict.values())))
[ "def", "ufloatDict_stdev", "(", "self", ",", "ufloat_dict", ")", ":", "return", "OrderedDict", "(", "izip", "(", "ufloat_dict", ".", "keys", "(", ")", ",", "map", "(", "lambda", "x", ":", "x", ".", "std_dev", ",", "ufloat_dict", ".", "values", "(", ")", ")", ")", ")" ]
This gives us a dictionary of nominal values from a dictionary of uncertainties
[ "This", "gives", "us", "a", "dictionary", "of", "nominal", "values", "from", "a", "dictionary", "of", "uncertainties" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/CodeTools/various.py#L80-L82
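Both helpers unpack a dict of uncertainties ufloat values; note that ufloatDict_stdev returns standard deviations even though its docstring (apparently copy-pasted from the nominal variant) still says "nominal values", and that izip ties the code to Python 2 (on Python 3 the builtin zip would be used). A hedged sketch of what the two calls compute, written directly against the uncertainties package rather than the vitools class:

from collections import OrderedDict
from uncertainties import ufloat

measurements = OrderedDict([('flux', ufloat(3.5, 0.2)), ('temp', ufloat(10000, 250))])

# Equivalent of vitools.ufloatDict_nominal(measurements):
nominal = OrderedDict((k, v.nominal_value) for k, v in measurements.items())
# Equivalent of vitools.ufloatDict_stdev(measurements):
stdev = OrderedDict((k, v.std_dev) for k, v in measurements.items())

print(nominal)   # OrderedDict([('flux', 3.5), ('temp', 10000.0)])
print(stdev)     # OrderedDict([('flux', 0.2), ('temp', 250.0)])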
novopl/peltak
src/peltak/logic/docs.py
docs
def docs(recreate, gen_index, run_doctests): # type: (bool, bool, bool) -> None """ Build the documentation for the project. Args: recreate (bool): If set to **True**, the build and output directories will be cleared prior to generating the docs. gen_index (bool): If set to **True**, it will generate top-level index file for the reference documentation. run_doctests (bool): Set to **True** if you want to run doctests after the documentation is generated. pretend (bool): If set to **True**, do not actually execute any shell commands, just print the command that would be executed. """ build_dir = conf.get_path('build_dir', '.build') docs_dir = conf.get_path('docs.path', 'docs') refdoc_paths = conf.get('docs.reference', []) docs_html_dir = conf.get_path('docs.out', os.path.join(docs_dir, 'html')) docs_tests_dir = conf.get_path('docs.tests_out', os.path.join(docs_dir, 'doctest')) docs_build_dir = os.path.join(build_dir, 'docs') if recreate: for path in (docs_html_dir, docs_build_dir): if os.path.exists(path): log.info("<91>Deleting <94>{}".format(path)) shutil.rmtree(path) if refdoc_paths: gen_ref_docs(gen_index) else: log.err('Not generating any reference documentation - ' 'No docs.reference specified in config') with conf.within_proj_dir(docs_dir): log.info('Building docs') shell.run('sphinx-build -b html -d {build} {docs} {out}'.format( build=docs_build_dir, docs=docs_dir, out=docs_html_dir, )) if run_doctests: log.info('Running doctests') shell.run('sphinx-build -b doctest -d {build} {docs} {out}'.format( build=docs_build_dir, docs=docs_dir, out=docs_tests_dir, )) log.info('You can view the docs by browsing to <34>file://{}'.format( os.path.join(docs_html_dir, 'index.html') ))
python
def docs(recreate, gen_index, run_doctests): # type: (bool, bool, bool) -> None """ Build the documentation for the project. Args: recreate (bool): If set to **True**, the build and output directories will be cleared prior to generating the docs. gen_index (bool): If set to **True**, it will generate top-level index file for the reference documentation. run_doctests (bool): Set to **True** if you want to run doctests after the documentation is generated. pretend (bool): If set to **True**, do not actually execute any shell commands, just print the command that would be executed. """ build_dir = conf.get_path('build_dir', '.build') docs_dir = conf.get_path('docs.path', 'docs') refdoc_paths = conf.get('docs.reference', []) docs_html_dir = conf.get_path('docs.out', os.path.join(docs_dir, 'html')) docs_tests_dir = conf.get_path('docs.tests_out', os.path.join(docs_dir, 'doctest')) docs_build_dir = os.path.join(build_dir, 'docs') if recreate: for path in (docs_html_dir, docs_build_dir): if os.path.exists(path): log.info("<91>Deleting <94>{}".format(path)) shutil.rmtree(path) if refdoc_paths: gen_ref_docs(gen_index) else: log.err('Not generating any reference documentation - ' 'No docs.reference specified in config') with conf.within_proj_dir(docs_dir): log.info('Building docs') shell.run('sphinx-build -b html -d {build} {docs} {out}'.format( build=docs_build_dir, docs=docs_dir, out=docs_html_dir, )) if run_doctests: log.info('Running doctests') shell.run('sphinx-build -b doctest -d {build} {docs} {out}'.format( build=docs_build_dir, docs=docs_dir, out=docs_tests_dir, )) log.info('You can view the docs by browsing to <34>file://{}'.format( os.path.join(docs_html_dir, 'index.html') ))
[ "def", "docs", "(", "recreate", ",", "gen_index", ",", "run_doctests", ")", ":", "# type: (bool, bool, bool) -> None", "build_dir", "=", "conf", ".", "get_path", "(", "'build_dir'", ",", "'.build'", ")", "docs_dir", "=", "conf", ".", "get_path", "(", "'docs.path'", ",", "'docs'", ")", "refdoc_paths", "=", "conf", ".", "get", "(", "'docs.reference'", ",", "[", "]", ")", "docs_html_dir", "=", "conf", ".", "get_path", "(", "'docs.out'", ",", "os", ".", "path", ".", "join", "(", "docs_dir", ",", "'html'", ")", ")", "docs_tests_dir", "=", "conf", ".", "get_path", "(", "'docs.tests_out'", ",", "os", ".", "path", ".", "join", "(", "docs_dir", ",", "'doctest'", ")", ")", "docs_build_dir", "=", "os", ".", "path", ".", "join", "(", "build_dir", ",", "'docs'", ")", "if", "recreate", ":", "for", "path", "in", "(", "docs_html_dir", ",", "docs_build_dir", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "log", ".", "info", "(", "\"<91>Deleting <94>{}\"", ".", "format", "(", "path", ")", ")", "shutil", ".", "rmtree", "(", "path", ")", "if", "refdoc_paths", ":", "gen_ref_docs", "(", "gen_index", ")", "else", ":", "log", ".", "err", "(", "'Not generating any reference documentation - '", "'No docs.reference specified in config'", ")", "with", "conf", ".", "within_proj_dir", "(", "docs_dir", ")", ":", "log", ".", "info", "(", "'Building docs'", ")", "shell", ".", "run", "(", "'sphinx-build -b html -d {build} {docs} {out}'", ".", "format", "(", "build", "=", "docs_build_dir", ",", "docs", "=", "docs_dir", ",", "out", "=", "docs_html_dir", ",", ")", ")", "if", "run_doctests", ":", "log", ".", "info", "(", "'Running doctests'", ")", "shell", ".", "run", "(", "'sphinx-build -b doctest -d {build} {docs} {out}'", ".", "format", "(", "build", "=", "docs_build_dir", ",", "docs", "=", "docs_dir", ",", "out", "=", "docs_tests_dir", ",", ")", ")", "log", ".", "info", "(", "'You can view the docs by browsing to <34>file://{}'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "docs_html_dir", ",", "'index.html'", ")", ")", ")" ]
Build the documentation for the project. Args: recreate (bool): If set to **True**, the build and output directories will be cleared prior to generating the docs. gen_index (bool): If set to **True**, it will generate top-level index file for the reference documentation. run_doctests (bool): Set to **True** if you want to run doctests after the documentation is generated. pretend (bool): If set to **True**, do not actually execute any shell commands, just print the command that would be executed.
[ "Build", "the", "documentation", "for", "the", "project", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/logic/docs.py#L32-L89
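docs() pulls all of its paths from the project configuration (build_dir, docs.path, docs.out, docs.tests_out and docs.reference) and shells out to sphinx-build; a hedged sketch of calling it directly, assuming the peltak project configuration and context have already been loaded for the current project:

from peltak.logic.docs import docs

# Rebuild reference + HTML docs from scratch and run doctests afterwards.
docs(recreate=True, gen_index=False, run_doctests=True)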
novopl/peltak
src/peltak/logic/docs.py
gen_ref_docs
def gen_ref_docs(gen_index=False): # type: (int, bool) -> None """ Generate reference documentation for the project. This will use **sphinx-refdoc** to generate the source .rst files for the reference documentation. Args: gen_index (bool): Set it to **True** if you want to generate the index file with the list of top-level packages. This is set to default as in most cases you only have one package per project so you can link directly to that package reference (and if index were generated sphinx would complain about file not included in toctree). """ try: from refdoc import generate_docs except ImportError as ex: msg = ("You need to install sphinx-refdoc if you want to generate " "code reference docs.") print(msg, file=sys.stderr) log.err("Exception: {}".format(ex)) sys.exit(-1) pretend = context.get('pretend', False) docs_dir = conf.get_path('docs.path', 'docs') docs_ref_dir = os.path.join(docs_dir, 'ref') refdoc_paths = conf.get('docs.reference', []) if os.path.exists(docs_ref_dir): if not pretend: log.info('Removing existing reference docs') shutil.rmtree(docs_ref_dir) else: log.info('Would remove old reference docs') args = { 'out_dir': docs_ref_dir, 'verbose': context.get('verbose', 0), } if gen_index: args['gen_index'] = True pkg_paths = [conf.proj_path(p) for p in refdoc_paths] if not pretend: log.info('Generating reference documentation') generate_docs(pkg_paths, **args) else: log.info("Would generate reference docs with the following params") shell.cprint('<90>{}', util.yaml_dump(args).rstrip()) shell.cprint('<90>paths:\n<34>{}', util.yaml_dump(pkg_paths).rstrip())
python
def gen_ref_docs(gen_index=False): # type: (int, bool) -> None """ Generate reference documentation for the project. This will use **sphinx-refdoc** to generate the source .rst files for the reference documentation. Args: gen_index (bool): Set it to **True** if you want to generate the index file with the list of top-level packages. This is set to default as in most cases you only have one package per project so you can link directly to that package reference (and if index were generated sphinx would complain about file not included in toctree). """ try: from refdoc import generate_docs except ImportError as ex: msg = ("You need to install sphinx-refdoc if you want to generate " "code reference docs.") print(msg, file=sys.stderr) log.err("Exception: {}".format(ex)) sys.exit(-1) pretend = context.get('pretend', False) docs_dir = conf.get_path('docs.path', 'docs') docs_ref_dir = os.path.join(docs_dir, 'ref') refdoc_paths = conf.get('docs.reference', []) if os.path.exists(docs_ref_dir): if not pretend: log.info('Removing existing reference docs') shutil.rmtree(docs_ref_dir) else: log.info('Would remove old reference docs') args = { 'out_dir': docs_ref_dir, 'verbose': context.get('verbose', 0), } if gen_index: args['gen_index'] = True pkg_paths = [conf.proj_path(p) for p in refdoc_paths] if not pretend: log.info('Generating reference documentation') generate_docs(pkg_paths, **args) else: log.info("Would generate reference docs with the following params") shell.cprint('<90>{}', util.yaml_dump(args).rstrip()) shell.cprint('<90>paths:\n<34>{}', util.yaml_dump(pkg_paths).rstrip())
[ "def", "gen_ref_docs", "(", "gen_index", "=", "False", ")", ":", "# type: (int, bool) -> None", "try", ":", "from", "refdoc", "import", "generate_docs", "except", "ImportError", "as", "ex", ":", "msg", "=", "(", "\"You need to install sphinx-refdoc if you want to generate \"", "\"code reference docs.\"", ")", "print", "(", "msg", ",", "file", "=", "sys", ".", "stderr", ")", "log", ".", "err", "(", "\"Exception: {}\"", ".", "format", "(", "ex", ")", ")", "sys", ".", "exit", "(", "-", "1", ")", "pretend", "=", "context", ".", "get", "(", "'pretend'", ",", "False", ")", "docs_dir", "=", "conf", ".", "get_path", "(", "'docs.path'", ",", "'docs'", ")", "docs_ref_dir", "=", "os", ".", "path", ".", "join", "(", "docs_dir", ",", "'ref'", ")", "refdoc_paths", "=", "conf", ".", "get", "(", "'docs.reference'", ",", "[", "]", ")", "if", "os", ".", "path", ".", "exists", "(", "docs_ref_dir", ")", ":", "if", "not", "pretend", ":", "log", ".", "info", "(", "'Removing existing reference docs'", ")", "shutil", ".", "rmtree", "(", "docs_ref_dir", ")", "else", ":", "log", ".", "info", "(", "'Would remove old reference docs'", ")", "args", "=", "{", "'out_dir'", ":", "docs_ref_dir", ",", "'verbose'", ":", "context", ".", "get", "(", "'verbose'", ",", "0", ")", ",", "}", "if", "gen_index", ":", "args", "[", "'gen_index'", "]", "=", "True", "pkg_paths", "=", "[", "conf", ".", "proj_path", "(", "p", ")", "for", "p", "in", "refdoc_paths", "]", "if", "not", "pretend", ":", "log", ".", "info", "(", "'Generating reference documentation'", ")", "generate_docs", "(", "pkg_paths", ",", "*", "*", "args", ")", "else", ":", "log", ".", "info", "(", "\"Would generate reference docs with the following params\"", ")", "shell", ".", "cprint", "(", "'<90>{}'", ",", "util", ".", "yaml_dump", "(", "args", ")", ".", "rstrip", "(", ")", ")", "shell", ".", "cprint", "(", "'<90>paths:\\n<34>{}'", ",", "util", ".", "yaml_dump", "(", "pkg_paths", ")", ".", "rstrip", "(", ")", ")" ]
Generate reference documentation for the project. This will use **sphinx-refdoc** to generate the source .rst files for the reference documentation. Args: gen_index (bool): Set it to **True** if you want to generate the index file with the list of top-level packages. This is set to default as in most cases you only have one package per project so you can link directly to that package reference (and if index were generated sphinx would complain about file not included in toctree).
[ "Generate", "reference", "documentation", "for", "the", "project", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/logic/docs.py#L92-L146
ebob9/cloudgenix-idname
cloudgenix_idname/__init__.py
generate_id_name_map
def generate_id_name_map(sdk, reverse=False): """ Generate the ID-NAME map dict :param sdk: CloudGenix API constructor :param reverse: Generate reverse name-> ID map as well, return tuple with both. :return: ID Name dictionary """ global_id_name_dict = {} global_name_id_dict = {} # system struct system_list = [] # Global lookup dictionary for sub items if_id_to_name = {} global_swi_id = {} global_ln_id = {} swi_to_wan_network_dict = {} swi_to_site_dict = {} wan_network_to_swi_dict = {} all_anynets = {} all_vpns = {} swi_id_name_dict = {} site_swi_dict = {} path_id_to_name = {} vpn_id_to_anynet_id = {} # Create xlation dicts and lists. logger.info("Caching Operators..") id_operator_dict, operator_id_dict = operators_to_name_dict(sdk) if id_operator_dict: global_id_name_dict.update(id_operator_dict) global_name_id_dict.update(operator_id_dict) if operator_id_dict: global_name_id_dict.update(operator_id_dict) logger.info("Caching Sites..") id_site_dict, site_id_dict, site_id_list, site_info_dict = siteid_to_name_dict(sdk) global_id_name_dict.update(id_site_dict) global_name_id_dict.update(site_id_dict) logger.info("Caching Elements..") id_element_dict, element_id_dict, element_site_dict, element_id_list = elements_to_name_dict(sdk) global_id_name_dict.update(id_element_dict) global_name_id_dict.update(element_id_dict) logger.info("Caching WAN Networks..") id_wannetwork_dict, name_wannetwork_id_dict, wannetwork_id_list, wannetwork_type_dict = wan_network_dicts(sdk) global_id_name_dict.update(id_wannetwork_dict) global_name_id_dict.update(name_wannetwork_id_dict) logger.info("Caching Circuit Catagories..") id_circuit_categories, name_circuit_categories = circuit_categories_dicts(sdk) global_id_name_dict.update(id_circuit_categories) global_name_id_dict.update(name_circuit_categories) logger.info("Caching Network Contexts..") id_network_contexts, name_circuit_contexts = network_context_dicts(sdk) global_id_name_dict.update(id_network_contexts) global_name_id_dict.update(name_circuit_contexts) logger.info("Caching Appdefs..") id_appdef_dict, name_appdef_dict, appdef_id_list = appdefs_to_name_dict(sdk) global_id_name_dict.update(id_appdef_dict) global_name_id_dict.update(name_appdef_dict) logger.info("Caching Policysets..") id_policyset_dict, name_policyset_dict, policyset_id_list = policyset_to_name_dict(sdk) global_id_name_dict.update(id_policyset_dict) global_name_id_dict.update(name_policyset_dict) logger.info("Caching Security Policysets..") id_securitypolicyset_dict, name_securitypolicyset_dict, \ securitypolicyset_id_list = securitypolicyset_to_name_dict(sdk) global_id_name_dict.update(id_securitypolicyset_dict) global_name_id_dict.update(name_securitypolicyset_dict) logger.info("Caching Security Zones..") id_securityzone_dict, securityzone_id_dict, securityzone_id_list = securityzone_to_name_dict(sdk) global_id_name_dict.update(id_securityzone_dict) global_name_id_dict.update(securityzone_id_dict) id_interface_dict = {} logger.info("Filling Network Site->Element->Interface table..") for site in site_id_list: elements = [] swi_id_dict = {} ln_id_dict = {} # enumerate elements for element in element_id_list: # Is this element bound to a site? site_in = element_site_dict.get(element, None) # if it is bound, and bound to this site, add to list. 
if site_in and site_in == site: # Query interfaces interfaces_list, if_id_to_name_item, if_name_to_id_item, _, \ _, if_id_data_entry = interface_query(site, element, sdk) # add the element to the list elements.append({ 'id': element, 'name': id_element_dict.get(element, ""), 'interfaces': interfaces_list }) # add the if id name mapping to the main dict if_id_to_name.update(if_id_to_name_item) # update grand interface list id_interface_dict.update(if_id_data_entry) system_list.append({ 'id': site, 'name': id_site_dict.get(site, ""), 'elements': elements }) # query Site WAN Interface info resp = sdk.get.waninterfaces(site) swi_status = resp.cgx_status swi_query = resp.cgx_content if swi_status: # iterate all the site wan interfaces for current_swi in swi_query.get('items', []): # get the WN bound to the SWI. wan_network_id = current_swi.get('network_id', "") swi_id = current_swi.get('id', "") name = current_swi.get('name') if name and swi_id: swi_id_name_dict[swi_id] = name elif swi_id and wan_network_id: # Handle SWI with no name. wan_network_name = id_wannetwork_dict.get(wan_network_id, wan_network_id) swi_id_name_dict[swi_id] = "Circuit to {0}".format(wan_network_name) if swi_id: # update SWI -> Site xlation dict swi_to_site_dict[swi_id] = site # get the SWIs if wan_network_id and swi_id: logger.debug('SWI_ID = SITE: {0} = {1}'.format(swi_id, site)) # query existing wan_network_to_swi dict if entry exists. existing_swi_list = wan_network_to_swi_dict.get(wan_network_id, []) # update swi -> WN xlate dict swi_to_wan_network_dict[swi_id] = wan_network_id # update WN -> swi xlate dict existing_swi_list.append(swi_id) wan_network_to_swi_dict[wan_network_id] = existing_swi_list # add to global global_swi_id.update(swi_id_name_dict) # query LAN Network info resp = sdk.get.lannetworks(site) ln_status = resp.cgx_status ln_query = resp.cgx_content if ln_status: for ln in ln_query.get('items'): ln_id = ln.get('id') ln_name = ln.get('name') if ln_id and ln_name: ln_id_dict[ln_id] = ln_name # add to global global_ln_id.update(ln_id_dict) logger.info("Loading VPN topology information for {0} sites, please wait...".format(len(site_id_list))) # add all interface IDs # note - can't reliably make reverse name to ID items here, as they are not global. global_id_name_dict.update(if_id_to_name) global_id_name_dict.update(global_swi_id) global_id_name_dict.update(global_ln_id) for site in site_id_list: site_swi_list = [] query = { "type": "basenet", "nodes": [ site ] } status = False rest_call_retry = 0 resp = sdk.post.topology(query) status = resp.cgx_status topology = resp.cgx_content if status and topology: # iterate topology. We need to iterate all of the matching SWIs, and existing anynet connections (sorted). logger.debug("TOPOLOGY: {0}".format(json.dumps(topology, indent=4))) for link in topology.get('links', []): link_type = link.get('type', "") # if an anynet link (SWI to SWI) if link_type in ["anynet", "public-anynet", "private-anynet"]: # vpn record, check for uniqueness. 
# 4.4.1 source_swi = link.get('source_wan_if_id') if not source_swi: # 4.3.x compatibility source_swi = link.get('source_wan_path_id') if source_swi: link['source_wan_if_id'] = source_swi # 4.4.1 dest_swi = link.get('target_wan_if_id') if not dest_swi: # 4.3.x compatibility dest_swi = link.get('target_wan_path_id') if dest_swi: link['target_wan_if_id'] = dest_swi # create anynet lookup key # anynet_lookup_key = "_".join(sorted([source_swi, dest_swi])) # use path ID anynet_lookup_key = link.get('path_id') if not all_anynets.get(anynet_lookup_key, None): # path is not in current anynets, add all_anynets[anynet_lookup_key] = link else: # path_id already seen. pass elif link_type in ['vpn']: vpn_lookup_key = link.get('path_id') if not all_vpns.get(vpn_lookup_key, None): # path is not in VPNs, add. all_vpns[vpn_lookup_key] = link else: # Bail out logger.info("ERROR: could not query site ID {0}. Continuing.".format(site)) # update all_anynets with site info. Can't do this above, because xlation table not finished when needed. for anynet_key, link in all_anynets.items(): # 4.4.1 source_swi = link.get('source_wan_if_id') if not source_swi: # 4.3.x compatibility source_swi = link.get('source_wan_path_id') # 4.4.1 dest_swi = link.get('target_wan_if_id') if not dest_swi: # 4.3.x compatibility dest_swi = link.get('target_wan_path_id') source_site_id = swi_to_site_dict.get(source_swi, 'UNKNOWN (Unable to map SWI to Site ID)') target_site_id = swi_to_site_dict.get(dest_swi, 'UNKNOWN (Unable to map SWI to Site ID)') source_wan_network_name = link.get("source_wan_network") target_wan_network_name = link.get("target_wan_network") # update struct in case it's needed later link['source_site_id'] = source_site_id link['target_site_id'] = target_site_id # get names. source_site_name = id_site_dict.get(source_site_id, source_site_id) target_site_name = id_site_dict.get(target_site_id, target_site_id) source_swi_name = swi_id_name_dict.get(source_swi, source_swi) target_swi_name = swi_id_name_dict.get(dest_swi, dest_swi) # build text map. anynet_text = "{0} ('{1}' via '{2}') <-> ('{4}' via '{5}') {3}".format( source_site_name, source_wan_network_name, source_swi_name, target_site_name, target_wan_network_name, target_swi_name, ) # update pathid to name dict path_id_to_name[anynet_key] = anynet_text logger.info("SWI -> WN xlate ({0}): {1}".format(len(swi_to_wan_network_dict), json.dumps(swi_to_wan_network_dict, indent=4))) logger.info("All Anynets ({0}): {1}".format(len(all_anynets), json.dumps(all_anynets, indent=4))) logger.info("All VPNs ({0}): {1}".format(len(all_vpns), json.dumps(all_vpns, indent=4))) logger.info("Site -> SWI construct ({0}): {1}".format(len(site_swi_dict), json.dumps(site_swi_dict, indent=4))) logger.info("WN to SWI xlate ({0}): {1}".format(len(wan_network_to_swi_dict), json.dumps(wan_network_to_swi_dict, indent=4))) logger.info("SWI -> SITE xlate ({0}): {1}".format(len(swi_to_site_dict), json.dumps(swi_to_site_dict, indent=4))) # create VPN to anynet maps AND update text mappings. 
for vpn_key, link in all_vpns.items(): anynet_link_id = link.get("anynet_link_id") source_element_id = link.get("source_node_id") target_element_id = link.get("target_node_id") # update vpn -> anynet table vpn_id_to_anynet_id[vpn_key] = anynet_link_id # get names source_element_name = id_element_dict.get(source_element_id, source_element_id) target_element_name = id_element_dict.get(target_element_id, target_element_id) anynet_text = path_id_to_name.get(anynet_link_id, anynet_link_id) vpn_text = "[{0}] : {1} : [{2}]".format( source_element_name, anynet_text, target_element_name ) # update path mapping path_id_to_name[vpn_key] = vpn_text # done, update global global_id_name_dict.update(path_id_to_name) if reverse: # return both id_name and what we can get of name_id. return global_id_name_dict, global_name_id_dict return global_id_name_dict
python
def generate_id_name_map(sdk, reverse=False): """ Generate the ID-NAME map dict :param sdk: CloudGenix API constructor :param reverse: Generate reverse name-> ID map as well, return tuple with both. :return: ID Name dictionary """ global_id_name_dict = {} global_name_id_dict = {} # system struct system_list = [] # Global lookup dictionary for sub items if_id_to_name = {} global_swi_id = {} global_ln_id = {} swi_to_wan_network_dict = {} swi_to_site_dict = {} wan_network_to_swi_dict = {} all_anynets = {} all_vpns = {} swi_id_name_dict = {} site_swi_dict = {} path_id_to_name = {} vpn_id_to_anynet_id = {} # Create xlation dicts and lists. logger.info("Caching Operators..") id_operator_dict, operator_id_dict = operators_to_name_dict(sdk) if id_operator_dict: global_id_name_dict.update(id_operator_dict) global_name_id_dict.update(operator_id_dict) if operator_id_dict: global_name_id_dict.update(operator_id_dict) logger.info("Caching Sites..") id_site_dict, site_id_dict, site_id_list, site_info_dict = siteid_to_name_dict(sdk) global_id_name_dict.update(id_site_dict) global_name_id_dict.update(site_id_dict) logger.info("Caching Elements..") id_element_dict, element_id_dict, element_site_dict, element_id_list = elements_to_name_dict(sdk) global_id_name_dict.update(id_element_dict) global_name_id_dict.update(element_id_dict) logger.info("Caching WAN Networks..") id_wannetwork_dict, name_wannetwork_id_dict, wannetwork_id_list, wannetwork_type_dict = wan_network_dicts(sdk) global_id_name_dict.update(id_wannetwork_dict) global_name_id_dict.update(name_wannetwork_id_dict) logger.info("Caching Circuit Catagories..") id_circuit_categories, name_circuit_categories = circuit_categories_dicts(sdk) global_id_name_dict.update(id_circuit_categories) global_name_id_dict.update(name_circuit_categories) logger.info("Caching Network Contexts..") id_network_contexts, name_circuit_contexts = network_context_dicts(sdk) global_id_name_dict.update(id_network_contexts) global_name_id_dict.update(name_circuit_contexts) logger.info("Caching Appdefs..") id_appdef_dict, name_appdef_dict, appdef_id_list = appdefs_to_name_dict(sdk) global_id_name_dict.update(id_appdef_dict) global_name_id_dict.update(name_appdef_dict) logger.info("Caching Policysets..") id_policyset_dict, name_policyset_dict, policyset_id_list = policyset_to_name_dict(sdk) global_id_name_dict.update(id_policyset_dict) global_name_id_dict.update(name_policyset_dict) logger.info("Caching Security Policysets..") id_securitypolicyset_dict, name_securitypolicyset_dict, \ securitypolicyset_id_list = securitypolicyset_to_name_dict(sdk) global_id_name_dict.update(id_securitypolicyset_dict) global_name_id_dict.update(name_securitypolicyset_dict) logger.info("Caching Security Zones..") id_securityzone_dict, securityzone_id_dict, securityzone_id_list = securityzone_to_name_dict(sdk) global_id_name_dict.update(id_securityzone_dict) global_name_id_dict.update(securityzone_id_dict) id_interface_dict = {} logger.info("Filling Network Site->Element->Interface table..") for site in site_id_list: elements = [] swi_id_dict = {} ln_id_dict = {} # enumerate elements for element in element_id_list: # Is this element bound to a site? site_in = element_site_dict.get(element, None) # if it is bound, and bound to this site, add to list. 
if site_in and site_in == site: # Query interfaces interfaces_list, if_id_to_name_item, if_name_to_id_item, _, \ _, if_id_data_entry = interface_query(site, element, sdk) # add the element to the list elements.append({ 'id': element, 'name': id_element_dict.get(element, ""), 'interfaces': interfaces_list }) # add the if id name mapping to the main dict if_id_to_name.update(if_id_to_name_item) # update grand interface list id_interface_dict.update(if_id_data_entry) system_list.append({ 'id': site, 'name': id_site_dict.get(site, ""), 'elements': elements }) # query Site WAN Interface info resp = sdk.get.waninterfaces(site) swi_status = resp.cgx_status swi_query = resp.cgx_content if swi_status: # iterate all the site wan interfaces for current_swi in swi_query.get('items', []): # get the WN bound to the SWI. wan_network_id = current_swi.get('network_id', "") swi_id = current_swi.get('id', "") name = current_swi.get('name') if name and swi_id: swi_id_name_dict[swi_id] = name elif swi_id and wan_network_id: # Handle SWI with no name. wan_network_name = id_wannetwork_dict.get(wan_network_id, wan_network_id) swi_id_name_dict[swi_id] = "Circuit to {0}".format(wan_network_name) if swi_id: # update SWI -> Site xlation dict swi_to_site_dict[swi_id] = site # get the SWIs if wan_network_id and swi_id: logger.debug('SWI_ID = SITE: {0} = {1}'.format(swi_id, site)) # query existing wan_network_to_swi dict if entry exists. existing_swi_list = wan_network_to_swi_dict.get(wan_network_id, []) # update swi -> WN xlate dict swi_to_wan_network_dict[swi_id] = wan_network_id # update WN -> swi xlate dict existing_swi_list.append(swi_id) wan_network_to_swi_dict[wan_network_id] = existing_swi_list # add to global global_swi_id.update(swi_id_name_dict) # query LAN Network info resp = sdk.get.lannetworks(site) ln_status = resp.cgx_status ln_query = resp.cgx_content if ln_status: for ln in ln_query.get('items'): ln_id = ln.get('id') ln_name = ln.get('name') if ln_id and ln_name: ln_id_dict[ln_id] = ln_name # add to global global_ln_id.update(ln_id_dict) logger.info("Loading VPN topology information for {0} sites, please wait...".format(len(site_id_list))) # add all interface IDs # note - can't reliably make reverse name to ID items here, as they are not global. global_id_name_dict.update(if_id_to_name) global_id_name_dict.update(global_swi_id) global_id_name_dict.update(global_ln_id) for site in site_id_list: site_swi_list = [] query = { "type": "basenet", "nodes": [ site ] } status = False rest_call_retry = 0 resp = sdk.post.topology(query) status = resp.cgx_status topology = resp.cgx_content if status and topology: # iterate topology. We need to iterate all of the matching SWIs, and existing anynet connections (sorted). logger.debug("TOPOLOGY: {0}".format(json.dumps(topology, indent=4))) for link in topology.get('links', []): link_type = link.get('type', "") # if an anynet link (SWI to SWI) if link_type in ["anynet", "public-anynet", "private-anynet"]: # vpn record, check for uniqueness. 
# 4.4.1 source_swi = link.get('source_wan_if_id') if not source_swi: # 4.3.x compatibility source_swi = link.get('source_wan_path_id') if source_swi: link['source_wan_if_id'] = source_swi # 4.4.1 dest_swi = link.get('target_wan_if_id') if not dest_swi: # 4.3.x compatibility dest_swi = link.get('target_wan_path_id') if dest_swi: link['target_wan_if_id'] = dest_swi # create anynet lookup key # anynet_lookup_key = "_".join(sorted([source_swi, dest_swi])) # use path ID anynet_lookup_key = link.get('path_id') if not all_anynets.get(anynet_lookup_key, None): # path is not in current anynets, add all_anynets[anynet_lookup_key] = link else: # path_id already seen. pass elif link_type in ['vpn']: vpn_lookup_key = link.get('path_id') if not all_vpns.get(vpn_lookup_key, None): # path is not in VPNs, add. all_vpns[vpn_lookup_key] = link else: # Bail out logger.info("ERROR: could not query site ID {0}. Continuing.".format(site)) # update all_anynets with site info. Can't do this above, because xlation table not finished when needed. for anynet_key, link in all_anynets.items(): # 4.4.1 source_swi = link.get('source_wan_if_id') if not source_swi: # 4.3.x compatibility source_swi = link.get('source_wan_path_id') # 4.4.1 dest_swi = link.get('target_wan_if_id') if not dest_swi: # 4.3.x compatibility dest_swi = link.get('target_wan_path_id') source_site_id = swi_to_site_dict.get(source_swi, 'UNKNOWN (Unable to map SWI to Site ID)') target_site_id = swi_to_site_dict.get(dest_swi, 'UNKNOWN (Unable to map SWI to Site ID)') source_wan_network_name = link.get("source_wan_network") target_wan_network_name = link.get("target_wan_network") # update struct in case it's needed later link['source_site_id'] = source_site_id link['target_site_id'] = target_site_id # get names. source_site_name = id_site_dict.get(source_site_id, source_site_id) target_site_name = id_site_dict.get(target_site_id, target_site_id) source_swi_name = swi_id_name_dict.get(source_swi, source_swi) target_swi_name = swi_id_name_dict.get(dest_swi, dest_swi) # build text map. anynet_text = "{0} ('{1}' via '{2}') <-> ('{4}' via '{5}') {3}".format( source_site_name, source_wan_network_name, source_swi_name, target_site_name, target_wan_network_name, target_swi_name, ) # update pathid to name dict path_id_to_name[anynet_key] = anynet_text logger.info("SWI -> WN xlate ({0}): {1}".format(len(swi_to_wan_network_dict), json.dumps(swi_to_wan_network_dict, indent=4))) logger.info("All Anynets ({0}): {1}".format(len(all_anynets), json.dumps(all_anynets, indent=4))) logger.info("All VPNs ({0}): {1}".format(len(all_vpns), json.dumps(all_vpns, indent=4))) logger.info("Site -> SWI construct ({0}): {1}".format(len(site_swi_dict), json.dumps(site_swi_dict, indent=4))) logger.info("WN to SWI xlate ({0}): {1}".format(len(wan_network_to_swi_dict), json.dumps(wan_network_to_swi_dict, indent=4))) logger.info("SWI -> SITE xlate ({0}): {1}".format(len(swi_to_site_dict), json.dumps(swi_to_site_dict, indent=4))) # create VPN to anynet maps AND update text mappings. 
for vpn_key, link in all_vpns.items(): anynet_link_id = link.get("anynet_link_id") source_element_id = link.get("source_node_id") target_element_id = link.get("target_node_id") # update vpn -> anynet table vpn_id_to_anynet_id[vpn_key] = anynet_link_id # get names source_element_name = id_element_dict.get(source_element_id, source_element_id) target_element_name = id_element_dict.get(target_element_id, target_element_id) anynet_text = path_id_to_name.get(anynet_link_id, anynet_link_id) vpn_text = "[{0}] : {1} : [{2}]".format( source_element_name, anynet_text, target_element_name ) # update path mapping path_id_to_name[vpn_key] = vpn_text # done, update global global_id_name_dict.update(path_id_to_name) if reverse: # return both id_name and what we can get of name_id. return global_id_name_dict, global_name_id_dict return global_id_name_dict
[ "def", "generate_id_name_map", "(", "sdk", ",", "reverse", "=", "False", ")", ":", "global_id_name_dict", "=", "{", "}", "global_name_id_dict", "=", "{", "}", "# system struct", "system_list", "=", "[", "]", "# Global lookup dictionary for sub items", "if_id_to_name", "=", "{", "}", "global_swi_id", "=", "{", "}", "global_ln_id", "=", "{", "}", "swi_to_wan_network_dict", "=", "{", "}", "swi_to_site_dict", "=", "{", "}", "wan_network_to_swi_dict", "=", "{", "}", "all_anynets", "=", "{", "}", "all_vpns", "=", "{", "}", "swi_id_name_dict", "=", "{", "}", "site_swi_dict", "=", "{", "}", "path_id_to_name", "=", "{", "}", "vpn_id_to_anynet_id", "=", "{", "}", "# Create xlation dicts and lists.", "logger", ".", "info", "(", "\"Caching Operators..\"", ")", "id_operator_dict", ",", "operator_id_dict", "=", "operators_to_name_dict", "(", "sdk", ")", "if", "id_operator_dict", ":", "global_id_name_dict", ".", "update", "(", "id_operator_dict", ")", "global_name_id_dict", ".", "update", "(", "operator_id_dict", ")", "if", "operator_id_dict", ":", "global_name_id_dict", ".", "update", "(", "operator_id_dict", ")", "logger", ".", "info", "(", "\"Caching Sites..\"", ")", "id_site_dict", ",", "site_id_dict", ",", "site_id_list", ",", "site_info_dict", "=", "siteid_to_name_dict", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_site_dict", ")", "global_name_id_dict", ".", "update", "(", "site_id_dict", ")", "logger", ".", "info", "(", "\"Caching Elements..\"", ")", "id_element_dict", ",", "element_id_dict", ",", "element_site_dict", ",", "element_id_list", "=", "elements_to_name_dict", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_element_dict", ")", "global_name_id_dict", ".", "update", "(", "element_id_dict", ")", "logger", ".", "info", "(", "\"Caching WAN Networks..\"", ")", "id_wannetwork_dict", ",", "name_wannetwork_id_dict", ",", "wannetwork_id_list", ",", "wannetwork_type_dict", "=", "wan_network_dicts", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_wannetwork_dict", ")", "global_name_id_dict", ".", "update", "(", "name_wannetwork_id_dict", ")", "logger", ".", "info", "(", "\"Caching Circuit Catagories..\"", ")", "id_circuit_categories", ",", "name_circuit_categories", "=", "circuit_categories_dicts", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_circuit_categories", ")", "global_name_id_dict", ".", "update", "(", "name_circuit_categories", ")", "logger", ".", "info", "(", "\"Caching Network Contexts..\"", ")", "id_network_contexts", ",", "name_circuit_contexts", "=", "network_context_dicts", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_network_contexts", ")", "global_name_id_dict", ".", "update", "(", "name_circuit_contexts", ")", "logger", ".", "info", "(", "\"Caching Appdefs..\"", ")", "id_appdef_dict", ",", "name_appdef_dict", ",", "appdef_id_list", "=", "appdefs_to_name_dict", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_appdef_dict", ")", "global_name_id_dict", ".", "update", "(", "name_appdef_dict", ")", "logger", ".", "info", "(", "\"Caching Policysets..\"", ")", "id_policyset_dict", ",", "name_policyset_dict", ",", "policyset_id_list", "=", "policyset_to_name_dict", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_policyset_dict", ")", "global_name_id_dict", ".", "update", "(", "name_policyset_dict", ")", "logger", ".", "info", "(", "\"Caching Security Policysets..\"", ")", "id_securitypolicyset_dict", ",", "name_securitypolicyset_dict", ",", 
"securitypolicyset_id_list", "=", "securitypolicyset_to_name_dict", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_securitypolicyset_dict", ")", "global_name_id_dict", ".", "update", "(", "name_securitypolicyset_dict", ")", "logger", ".", "info", "(", "\"Caching Security Zones..\"", ")", "id_securityzone_dict", ",", "securityzone_id_dict", ",", "securityzone_id_list", "=", "securityzone_to_name_dict", "(", "sdk", ")", "global_id_name_dict", ".", "update", "(", "id_securityzone_dict", ")", "global_name_id_dict", ".", "update", "(", "securityzone_id_dict", ")", "id_interface_dict", "=", "{", "}", "logger", ".", "info", "(", "\"Filling Network Site->Element->Interface table..\"", ")", "for", "site", "in", "site_id_list", ":", "elements", "=", "[", "]", "swi_id_dict", "=", "{", "}", "ln_id_dict", "=", "{", "}", "# enumerate elements", "for", "element", "in", "element_id_list", ":", "# Is this element bound to a site?", "site_in", "=", "element_site_dict", ".", "get", "(", "element", ",", "None", ")", "# if it is bound, and bound to this site, add to list.", "if", "site_in", "and", "site_in", "==", "site", ":", "# Query interfaces", "interfaces_list", ",", "if_id_to_name_item", ",", "if_name_to_id_item", ",", "_", ",", "_", ",", "if_id_data_entry", "=", "interface_query", "(", "site", ",", "element", ",", "sdk", ")", "# add the element to the list", "elements", ".", "append", "(", "{", "'id'", ":", "element", ",", "'name'", ":", "id_element_dict", ".", "get", "(", "element", ",", "\"\"", ")", ",", "'interfaces'", ":", "interfaces_list", "}", ")", "# add the if id name mapping to the main dict", "if_id_to_name", ".", "update", "(", "if_id_to_name_item", ")", "# update grand interface list", "id_interface_dict", ".", "update", "(", "if_id_data_entry", ")", "system_list", ".", "append", "(", "{", "'id'", ":", "site", ",", "'name'", ":", "id_site_dict", ".", "get", "(", "site", ",", "\"\"", ")", ",", "'elements'", ":", "elements", "}", ")", "# query Site WAN Interface info", "resp", "=", "sdk", ".", "get", ".", "waninterfaces", "(", "site", ")", "swi_status", "=", "resp", ".", "cgx_status", "swi_query", "=", "resp", ".", "cgx_content", "if", "swi_status", ":", "# iterate all the site wan interfaces", "for", "current_swi", "in", "swi_query", ".", "get", "(", "'items'", ",", "[", "]", ")", ":", "# get the WN bound to the SWI.", "wan_network_id", "=", "current_swi", ".", "get", "(", "'network_id'", ",", "\"\"", ")", "swi_id", "=", "current_swi", ".", "get", "(", "'id'", ",", "\"\"", ")", "name", "=", "current_swi", ".", "get", "(", "'name'", ")", "if", "name", "and", "swi_id", ":", "swi_id_name_dict", "[", "swi_id", "]", "=", "name", "elif", "swi_id", "and", "wan_network_id", ":", "# Handle SWI with no name.", "wan_network_name", "=", "id_wannetwork_dict", ".", "get", "(", "wan_network_id", ",", "wan_network_id", ")", "swi_id_name_dict", "[", "swi_id", "]", "=", "\"Circuit to {0}\"", ".", "format", "(", "wan_network_name", ")", "if", "swi_id", ":", "# update SWI -> Site xlation dict", "swi_to_site_dict", "[", "swi_id", "]", "=", "site", "# get the SWIs", "if", "wan_network_id", "and", "swi_id", ":", "logger", ".", "debug", "(", "'SWI_ID = SITE: {0} = {1}'", ".", "format", "(", "swi_id", ",", "site", ")", ")", "# query existing wan_network_to_swi dict if entry exists.", "existing_swi_list", "=", "wan_network_to_swi_dict", ".", "get", "(", "wan_network_id", ",", "[", "]", ")", "# update swi -> WN xlate dict", "swi_to_wan_network_dict", "[", "swi_id", "]", "=", "wan_network_id", "# 
update WN -> swi xlate dict", "existing_swi_list", ".", "append", "(", "swi_id", ")", "wan_network_to_swi_dict", "[", "wan_network_id", "]", "=", "existing_swi_list", "# add to global", "global_swi_id", ".", "update", "(", "swi_id_name_dict", ")", "# query LAN Network info", "resp", "=", "sdk", ".", "get", ".", "lannetworks", "(", "site", ")", "ln_status", "=", "resp", ".", "cgx_status", "ln_query", "=", "resp", ".", "cgx_content", "if", "ln_status", ":", "for", "ln", "in", "ln_query", ".", "get", "(", "'items'", ")", ":", "ln_id", "=", "ln", ".", "get", "(", "'id'", ")", "ln_name", "=", "ln", ".", "get", "(", "'name'", ")", "if", "ln_id", "and", "ln_name", ":", "ln_id_dict", "[", "ln_id", "]", "=", "ln_name", "# add to global", "global_ln_id", ".", "update", "(", "ln_id_dict", ")", "logger", ".", "info", "(", "\"Loading VPN topology information for {0} sites, please wait...\"", ".", "format", "(", "len", "(", "site_id_list", ")", ")", ")", "# add all interface IDs", "# note - can't reliably make reverse name to ID items here, as they are not global.", "global_id_name_dict", ".", "update", "(", "if_id_to_name", ")", "global_id_name_dict", ".", "update", "(", "global_swi_id", ")", "global_id_name_dict", ".", "update", "(", "global_ln_id", ")", "for", "site", "in", "site_id_list", ":", "site_swi_list", "=", "[", "]", "query", "=", "{", "\"type\"", ":", "\"basenet\"", ",", "\"nodes\"", ":", "[", "site", "]", "}", "status", "=", "False", "rest_call_retry", "=", "0", "resp", "=", "sdk", ".", "post", ".", "topology", "(", "query", ")", "status", "=", "resp", ".", "cgx_status", "topology", "=", "resp", ".", "cgx_content", "if", "status", "and", "topology", ":", "# iterate topology. We need to iterate all of the matching SWIs, and existing anynet connections (sorted).", "logger", ".", "debug", "(", "\"TOPOLOGY: {0}\"", ".", "format", "(", "json", ".", "dumps", "(", "topology", ",", "indent", "=", "4", ")", ")", ")", "for", "link", "in", "topology", ".", "get", "(", "'links'", ",", "[", "]", ")", ":", "link_type", "=", "link", ".", "get", "(", "'type'", ",", "\"\"", ")", "# if an anynet link (SWI to SWI)", "if", "link_type", "in", "[", "\"anynet\"", ",", "\"public-anynet\"", ",", "\"private-anynet\"", "]", ":", "# vpn record, check for uniqueness.", "# 4.4.1", "source_swi", "=", "link", ".", "get", "(", "'source_wan_if_id'", ")", "if", "not", "source_swi", ":", "# 4.3.x compatibility", "source_swi", "=", "link", ".", "get", "(", "'source_wan_path_id'", ")", "if", "source_swi", ":", "link", "[", "'source_wan_if_id'", "]", "=", "source_swi", "# 4.4.1", "dest_swi", "=", "link", ".", "get", "(", "'target_wan_if_id'", ")", "if", "not", "dest_swi", ":", "# 4.3.x compatibility", "dest_swi", "=", "link", ".", "get", "(", "'target_wan_path_id'", ")", "if", "dest_swi", ":", "link", "[", "'target_wan_if_id'", "]", "=", "dest_swi", "# create anynet lookup key", "# anynet_lookup_key = \"_\".join(sorted([source_swi, dest_swi]))", "# use path ID", "anynet_lookup_key", "=", "link", ".", "get", "(", "'path_id'", ")", "if", "not", "all_anynets", ".", "get", "(", "anynet_lookup_key", ",", "None", ")", ":", "# path is not in current anynets, add", "all_anynets", "[", "anynet_lookup_key", "]", "=", "link", "else", ":", "# path_id already seen.", "pass", "elif", "link_type", "in", "[", "'vpn'", "]", ":", "vpn_lookup_key", "=", "link", ".", "get", "(", "'path_id'", ")", "if", "not", "all_vpns", ".", "get", "(", "vpn_lookup_key", ",", "None", ")", ":", "# path is not in VPNs, add.", "all_vpns", "[", "vpn_lookup_key", "]", 
"=", "link", "else", ":", "# Bail out", "logger", ".", "info", "(", "\"ERROR: could not query site ID {0}. Continuing.\"", ".", "format", "(", "site", ")", ")", "# update all_anynets with site info. Can't do this above, because xlation table not finished when needed.", "for", "anynet_key", ",", "link", "in", "all_anynets", ".", "items", "(", ")", ":", "# 4.4.1", "source_swi", "=", "link", ".", "get", "(", "'source_wan_if_id'", ")", "if", "not", "source_swi", ":", "# 4.3.x compatibility", "source_swi", "=", "link", ".", "get", "(", "'source_wan_path_id'", ")", "# 4.4.1", "dest_swi", "=", "link", ".", "get", "(", "'target_wan_if_id'", ")", "if", "not", "dest_swi", ":", "# 4.3.x compatibility", "dest_swi", "=", "link", ".", "get", "(", "'target_wan_path_id'", ")", "source_site_id", "=", "swi_to_site_dict", ".", "get", "(", "source_swi", ",", "'UNKNOWN (Unable to map SWI to Site ID)'", ")", "target_site_id", "=", "swi_to_site_dict", ".", "get", "(", "dest_swi", ",", "'UNKNOWN (Unable to map SWI to Site ID)'", ")", "source_wan_network_name", "=", "link", ".", "get", "(", "\"source_wan_network\"", ")", "target_wan_network_name", "=", "link", ".", "get", "(", "\"target_wan_network\"", ")", "# update struct in case it's needed later", "link", "[", "'source_site_id'", "]", "=", "source_site_id", "link", "[", "'target_site_id'", "]", "=", "target_site_id", "# get names.", "source_site_name", "=", "id_site_dict", ".", "get", "(", "source_site_id", ",", "source_site_id", ")", "target_site_name", "=", "id_site_dict", ".", "get", "(", "target_site_id", ",", "target_site_id", ")", "source_swi_name", "=", "swi_id_name_dict", ".", "get", "(", "source_swi", ",", "source_swi", ")", "target_swi_name", "=", "swi_id_name_dict", ".", "get", "(", "dest_swi", ",", "dest_swi", ")", "# build text map.", "anynet_text", "=", "\"{0} ('{1}' via '{2}') <-> ('{4}' via '{5}') {3}\"", ".", "format", "(", "source_site_name", ",", "source_wan_network_name", ",", "source_swi_name", ",", "target_site_name", ",", "target_wan_network_name", ",", "target_swi_name", ",", ")", "# update pathid to name dict", "path_id_to_name", "[", "anynet_key", "]", "=", "anynet_text", "logger", ".", "info", "(", "\"SWI -> WN xlate ({0}): {1}\"", ".", "format", "(", "len", "(", "swi_to_wan_network_dict", ")", ",", "json", ".", "dumps", "(", "swi_to_wan_network_dict", ",", "indent", "=", "4", ")", ")", ")", "logger", ".", "info", "(", "\"All Anynets ({0}): {1}\"", ".", "format", "(", "len", "(", "all_anynets", ")", ",", "json", ".", "dumps", "(", "all_anynets", ",", "indent", "=", "4", ")", ")", ")", "logger", ".", "info", "(", "\"All VPNs ({0}): {1}\"", ".", "format", "(", "len", "(", "all_vpns", ")", ",", "json", ".", "dumps", "(", "all_vpns", ",", "indent", "=", "4", ")", ")", ")", "logger", ".", "info", "(", "\"Site -> SWI construct ({0}): {1}\"", ".", "format", "(", "len", "(", "site_swi_dict", ")", ",", "json", ".", "dumps", "(", "site_swi_dict", ",", "indent", "=", "4", ")", ")", ")", "logger", ".", "info", "(", "\"WN to SWI xlate ({0}): {1}\"", ".", "format", "(", "len", "(", "wan_network_to_swi_dict", ")", ",", "json", ".", "dumps", "(", "wan_network_to_swi_dict", ",", "indent", "=", "4", ")", ")", ")", "logger", ".", "info", "(", "\"SWI -> SITE xlate ({0}): {1}\"", ".", "format", "(", "len", "(", "swi_to_site_dict", ")", ",", "json", ".", "dumps", "(", "swi_to_site_dict", ",", "indent", "=", "4", ")", ")", ")", "# create VPN to anynet maps AND update text mappings.", "for", "vpn_key", ",", "link", "in", "all_vpns", ".", "items", "(", ")", 
":", "anynet_link_id", "=", "link", ".", "get", "(", "\"anynet_link_id\"", ")", "source_element_id", "=", "link", ".", "get", "(", "\"source_node_id\"", ")", "target_element_id", "=", "link", ".", "get", "(", "\"target_node_id\"", ")", "# update vpn -> anynet table", "vpn_id_to_anynet_id", "[", "vpn_key", "]", "=", "anynet_link_id", "# get names", "source_element_name", "=", "id_element_dict", ".", "get", "(", "source_element_id", ",", "source_element_id", ")", "target_element_name", "=", "id_element_dict", ".", "get", "(", "target_element_id", ",", "target_element_id", ")", "anynet_text", "=", "path_id_to_name", ".", "get", "(", "anynet_link_id", ",", "anynet_link_id", ")", "vpn_text", "=", "\"[{0}] : {1} : [{2}]\"", ".", "format", "(", "source_element_name", ",", "anynet_text", ",", "target_element_name", ")", "# update path mapping", "path_id_to_name", "[", "vpn_key", "]", "=", "vpn_text", "# done, update global", "global_id_name_dict", ".", "update", "(", "path_id_to_name", ")", "if", "reverse", ":", "# return both id_name and what we can get of name_id.", "return", "global_id_name_dict", ",", "global_name_id_dict", "return", "global_id_name_dict" ]
Generate the ID-NAME map dict :param sdk: CloudGenix API constructor :param reverse: Generate reverse name-> ID map as well, return tuple with both. :return: ID Name dictionary
[ "Generate", "the", "ID", "-", "NAME", "map", "dict", ":", "param", "sdk", ":", "CloudGenix", "API", "constructor", ":", "param", "reverse", ":", "Generate", "reverse", "name", "-", ">", "ID", "map", "as", "well", "return", "tuple", "with", "both", ".", ":", "return", ":", "ID", "Name", "dictionary" ]
train
https://github.com/ebob9/cloudgenix-idname/blob/f372eaa768dad0610a0f225015f34067416c2b4a/cloudgenix_idname/__init__.py#L386-L719
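A minimal usage sketch for the generate_id_name_map() record above. The cloudgenix package, the interactive login call, and the credentials are assumptions for illustration; only the function's signature and return values come from the record itself.

import cloudgenix
from cloudgenix_idname import generate_id_name_map

sdk = cloudgenix.API()
sdk.interactive.login("user@example.com", "password")  # hypothetical credentials / login flow

# Forward map only: {id: human-readable name} for sites, elements, interfaces, SWIs, paths, etc.
id_to_name = generate_id_name_map(sdk)

# With reverse=True a (id -> name, name -> id) tuple is returned.
id_to_name, name_to_id = generate_id_name_map(sdk, reverse=True)

# Translate an opaque topology/path ID into a readable label.
print(id_to_name.get("1234567890", "unknown id"))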
jaredLunde/vital-tools
vital/tools/dicts.py
revrank_dict
def revrank_dict(dict, key=lambda t: t[1], as_tuple=False): """ Reverse sorts a #dict by a given key, optionally returning it as a #tuple. By default, the @dict is sorted by it's value. @dict: the #dict you wish to sorts @key: the #sorted key to use @as_tuple: returns result as a #tuple ((k, v),...) -> :class:OrderedDict or #tuple """ sorted_list = sorted(dict.items(), key=key, reverse=True) return OrderedDict(sorted_list) if not as_tuple else tuple(sorted_list)
python
def revrank_dict(dict, key=lambda t: t[1], as_tuple=False): """ Reverse sorts a #dict by a given key, optionally returning it as a #tuple. By default, the @dict is sorted by it's value. @dict: the #dict you wish to sorts @key: the #sorted key to use @as_tuple: returns result as a #tuple ((k, v),...) -> :class:OrderedDict or #tuple """ sorted_list = sorted(dict.items(), key=key, reverse=True) return OrderedDict(sorted_list) if not as_tuple else tuple(sorted_list)
[ "def", "revrank_dict", "(", "dict", ",", "key", "=", "lambda", "t", ":", "t", "[", "1", "]", ",", "as_tuple", "=", "False", ")", ":", "sorted_list", "=", "sorted", "(", "dict", ".", "items", "(", ")", ",", "key", "=", "key", ",", "reverse", "=", "True", ")", "return", "OrderedDict", "(", "sorted_list", ")", "if", "not", "as_tuple", "else", "tuple", "(", "sorted_list", ")" ]
Reverse sorts a #dict by a given key, optionally returning it as a #tuple. By default, the @dict is sorted by its value. @dict: the #dict you wish to sort @key: the #sorted key to use @as_tuple: returns result as a #tuple ((k, v),...) -> :class:OrderedDict or #tuple
[ "Reverse", "sorts", "a", "#dict", "by", "a", "given", "key", "optionally", "returning", "it", "as", "a", "#tuple", ".", "By", "default", "the", "@dict", "is", "sorted", "by", "it", "s", "value", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/tools/dicts.py#L31-L42
jaredLunde/vital-tools
vital/tools/dicts.py
rank_dict
def rank_dict(dict, key=lambda t: t[1], as_tuple=False): """ Sorts a #dict by a given key, optionally returning it as a #tuple. By default, the @dict is sorted by it's value. @dict: the #dict you wish to sorts @key: the #sorted key to use @as_tuple: returns result as a #tuple ((k, v),...) -> :class:OrderedDict or #tuple """ sorted_list = sorted(dict.items(), key=key) return OrderedDict(sorted_list) if not as_tuple else tuple(sorted_list)
python
def rank_dict(dict, key=lambda t: t[1], as_tuple=False): """ Sorts a #dict by a given key, optionally returning it as a #tuple. By default, the @dict is sorted by it's value. @dict: the #dict you wish to sorts @key: the #sorted key to use @as_tuple: returns result as a #tuple ((k, v),...) -> :class:OrderedDict or #tuple """ sorted_list = sorted(dict.items(), key=key) return OrderedDict(sorted_list) if not as_tuple else tuple(sorted_list)
[ "def", "rank_dict", "(", "dict", ",", "key", "=", "lambda", "t", ":", "t", "[", "1", "]", ",", "as_tuple", "=", "False", ")", ":", "sorted_list", "=", "sorted", "(", "dict", ".", "items", "(", ")", ",", "key", "=", "key", ")", "return", "OrderedDict", "(", "sorted_list", ")", "if", "not", "as_tuple", "else", "tuple", "(", "sorted_list", ")" ]
Sorts a #dict by a given key, optionally returning it as a #tuple. By default, the @dict is sorted by its value. @dict: the #dict you wish to sort @key: the #sorted key to use @as_tuple: returns result as a #tuple ((k, v),...) -> :class:OrderedDict or #tuple
[ "Sorts", "a", "#dict", "by", "a", "given", "key", "optionally", "returning", "it", "as", "a", "#tuple", ".", "By", "default", "the", "@dict", "is", "sorted", "by", "it", "s", "value", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/tools/dicts.py#L45-L56
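A short usage sketch covering both rank_dict and revrank_dict from the two records above; the import path is inferred from the file location and may differ.

from vital.tools.dicts import rank_dict, revrank_dict

scores = {'a': 3, 'b': 1, 'c': 2}

rank_dict(scores)     # OrderedDict([('b', 1), ('c', 2), ('a', 3)]) - ascending by value
revrank_dict(scores)  # OrderedDict([('a', 3), ('c', 2), ('b', 1)]) - descending by value

# Sort by key name instead of value and return plain (k, v) tuples.
revrank_dict(scores, key=lambda t: t[0], as_tuple=True)
# (('c', 2), ('b', 1), ('a', 3))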
jaredLunde/vital-tools
vital/tools/dicts.py
getitem_in
def getitem_in(obj, name): """ Finds a key in @obj via a period-delimited string @name. @obj: (#dict) @name: (#str) |.|-separated keys to search @obj in .. obj = {'foo': {'bar': {'baz': True}}} getitem_in(obj, 'foo.bar.baz') .. |True| """ for part in name.split('.'): obj = obj[part] return obj
python
def getitem_in(obj, name): """ Finds a key in @obj via a period-delimited string @name. @obj: (#dict) @name: (#str) |.|-separated keys to search @obj in .. obj = {'foo': {'bar': {'baz': True}}} getitem_in(obj, 'foo.bar.baz') .. |True| """ for part in name.split('.'): obj = obj[part] return obj
[ "def", "getitem_in", "(", "obj", ",", "name", ")", ":", "for", "part", "in", "name", ".", "split", "(", "'.'", ")", ":", "obj", "=", "obj", "[", "part", "]", "return", "obj" ]
Finds a key in @obj via a period-delimited string @name. @obj: (#dict) @name: (#str) |.|-separated keys to search @obj in .. obj = {'foo': {'bar': {'baz': True}}} getitem_in(obj, 'foo.bar.baz') .. |True|
[ "Finds", "a", "key", "in" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/tools/dicts.py#L59-L71
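A usage sketch for getitem_in; the import path is inferred from the file location.

from vital.tools.dicts import getitem_in

obj = {'foo': {'bar': {'baz': True}}}

getitem_in(obj, 'foo.bar.baz')  # True
getitem_in(obj, 'foo.bar')      # {'baz': True}

# Missing keys raise KeyError, exactly like normal dict indexing.
try:
    getitem_in(obj, 'foo.missing.baz')
except KeyError as exc:
    print('not found:', exc)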
novopl/peltak
src/peltak/extra/gitflow/commands/release.py
start
def start(component, exact): # type: (str) -> None """ Create a new release. It will bump the current version number and create a release branch called `release/<version>` with one new commit (the version bump). **Example Config**:: \b version_file: 'src/mypkg/__init__.py' **Examples**:: \b $ peltak release start patch # Make a new patch release $ peltak release start minor # Make a new minor release $ peltak release start major # Make a new major release $ peltak release start # same as start patch """ from peltak.extra.gitflow import logic logic.release.start(component, exact)
python
def start(component, exact): # type: (str) -> None """ Create a new release. It will bump the current version number and create a release branch called `release/<version>` with one new commit (the version bump). **Example Config**:: \b version_file: 'src/mypkg/__init__.py' **Examples**:: \b $ peltak release start patch # Make a new patch release $ peltak release start minor # Make a new minor release $ peltak release start major # Make a new major release $ peltak release start # same as start patch """ from peltak.extra.gitflow import logic logic.release.start(component, exact)
[ "def", "start", "(", "component", ",", "exact", ")", ":", "# type: (str) -> None", "from", "peltak", ".", "extra", ".", "gitflow", "import", "logic", "logic", ".", "release", ".", "start", "(", "component", ",", "exact", ")" ]
Create a new release. It will bump the current version number and create a release branch called `release/<version>` with one new commit (the version bump). **Example Config**:: \b version_file: 'src/mypkg/__init__.py' **Examples**:: \b $ peltak release start patch # Make a new patch release $ peltak release start minor # Make a new minor release $ peltak release start major # Make a new major release $ peltak release start # same as start patch
[ "Create", "a", "new", "release", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/extra/gitflow/commands/release.py#L59-L81
novopl/peltak
src/peltak/extra/gitflow/commands/release.py
tag_release
def tag_release(message): # type: (str, bool) -> None """ Tag the current commit with as the current version release. This should be the same commit as the one that's uploaded as the release (to pypi for example). **Example Config**:: \b version_file: 'src/mypkg/__init__.py' Examples:: $ peltak release tag # Tag the current commit as release """ from peltak.extra.gitflow import logic logic.release.tag(message)
python
def tag_release(message): # type: (str, bool) -> None """ Tag the current commit with as the current version release. This should be the same commit as the one that's uploaded as the release (to pypi for example). **Example Config**:: \b version_file: 'src/mypkg/__init__.py' Examples:: $ peltak release tag # Tag the current commit as release """ from peltak.extra.gitflow import logic logic.release.tag(message)
[ "def", "tag_release", "(", "message", ")", ":", "# type: (str, bool) -> None", "from", "peltak", ".", "extra", ".", "gitflow", "import", "logic", "logic", ".", "release", ".", "tag", "(", "message", ")" ]
Tag the current commit as the current version release. This should be the same commit as the one that's uploaded as the release (to pypi for example). **Example Config**:: \b version_file: 'src/mypkg/__init__.py' Examples:: $ peltak release tag # Tag the current commit as release
[ "Tag", "the", "current", "commit", "with", "as", "the", "current", "version", "release", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/extra/gitflow/commands/release.py#L91-L109
Vital-Fernandez/dazer
working_examples/pyCloudy_heliumEmissivities.py
query_file_location
def query_file_location(question, default_address): """ This function asks for a location file address from the command terminal it checks if the file exists before proceeding with the code "question" is a string that is presented to the user. "default_address" is the presumed file location. """ while True: if default_address == None: prompt = '{}:'.format(question, default_address) else: prompt = '{} [{}]'.format(question, default_address) sys.stdout.write(prompt) input_address = raw_input() if default_address is not None and input_address == '': input_address = default_address if os.path.isfile(input_address): return input_address else: print 'sorry no file was found at that location\n'
python
def query_file_location(question, default_address): """ This function asks for a location file address from the command terminal it checks if the file exists before proceeding with the code "question" is a string that is presented to the user. "default_address" is the presumed file location. """ while True: if default_address == None: prompt = '{}:'.format(question, default_address) else: prompt = '{} [{}]'.format(question, default_address) sys.stdout.write(prompt) input_address = raw_input() if default_address is not None and input_address == '': input_address = default_address if os.path.isfile(input_address): return input_address else: print 'sorry no file was found at that location\n'
[ "def", "query_file_location", "(", "question", ",", "default_address", ")", ":", "while", "True", ":", "if", "default_address", "==", "None", ":", "prompt", "=", "'{}:'", ".", "format", "(", "question", ",", "default_address", ")", "else", ":", "prompt", "=", "'{} [{}]'", ".", "format", "(", "question", ",", "default_address", ")", "sys", ".", "stdout", ".", "write", "(", "prompt", ")", "input_address", "=", "raw_input", "(", ")", "if", "default_address", "is", "not", "None", "and", "input_address", "==", "''", ":", "input_address", "=", "default_address", "if", "os", ".", "path", ".", "isfile", "(", "input_address", ")", ":", "return", "input_address", "else", ":", "print", "'sorry no file was found at that location\\n'" ]
This function asks for a location file address from the command terminal it checks if the file exists before proceeding with the code "question" is a string that is presented to the user. "default_address" is the presumed file location.
[ "This", "function", "asks", "for", "a", "location", "file", "address", "from", "the", "command", "terminal", "it", "checks", "if", "the", "file", "exists", "before", "proceeding", "with", "the", "code", "question", "is", "a", "string", "that", "is", "presented", "to", "the", "user", ".", "default_address", "is", "the", "presumed", "file", "location", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/working_examples/pyCloudy_heliumEmissivities.py#L8-L30
Vital-Fernandez/dazer
working_examples/pyCloudy_heliumEmissivities.py
declare_output_files
def declare_output_files(): """ This method establishes the output files from pycloudy grids For these simulations in which we are only want to calculate the emissivities from the hydrogen and helium lines most of the default files from pycloudy outputs are not required :return: """ #Exclude these files from being generated by the simulations components_remove = [#['radius', '.rad'], #['continuum', '.cont'], #['physical conditions', '.phy'], ['overview', '.ovr'], ['heating', '.heat'], ['cooling', '.cool'], ['optical depth', '.opd']] for item in components_remove: pc.config.SAVE_LIST.remove(item) #Exclude these elements files from being generated by the simulations elements_remove = [#['hydrogen','.ele_H'], #['helium','.ele_He'], ['carbon', '.ele_C'], ['nitrogen', '.ele_N'], ['oxygen', '.ele_O'], ['argon', '.ele_Ar'], ['neon', '.ele_Ne'], ['sulphur', '.ele_S'], ['chlorin', '.ele_Cl'], ['iron', '.ele_Fe'], ['silicon', '.ele_Si']] for item in elements_remove: pc.config.SAVE_LIST_ELEMS.remove(item) return
python
def declare_output_files(): """ This method establishes the output files from pycloudy grids For these simulations in which we are only want to calculate the emissivities from the hydrogen and helium lines most of the default files from pycloudy outputs are not required :return: """ #Exclude these files from being generated by the simulations components_remove = [#['radius', '.rad'], #['continuum', '.cont'], #['physical conditions', '.phy'], ['overview', '.ovr'], ['heating', '.heat'], ['cooling', '.cool'], ['optical depth', '.opd']] for item in components_remove: pc.config.SAVE_LIST.remove(item) #Exclude these elements files from being generated by the simulations elements_remove = [#['hydrogen','.ele_H'], #['helium','.ele_He'], ['carbon', '.ele_C'], ['nitrogen', '.ele_N'], ['oxygen', '.ele_O'], ['argon', '.ele_Ar'], ['neon', '.ele_Ne'], ['sulphur', '.ele_S'], ['chlorin', '.ele_Cl'], ['iron', '.ele_Fe'], ['silicon', '.ele_Si']] for item in elements_remove: pc.config.SAVE_LIST_ELEMS.remove(item) return
[ "def", "declare_output_files", "(", ")", ":", "#Exclude these files from being generated by the simulations", "components_remove", "=", "[", "#['radius', '.rad'],", "#['continuum', '.cont'],", "#['physical conditions', '.phy'],", "[", "'overview'", ",", "'.ovr'", "]", ",", "[", "'heating'", ",", "'.heat'", "]", ",", "[", "'cooling'", ",", "'.cool'", "]", ",", "[", "'optical depth'", ",", "'.opd'", "]", "]", "for", "item", "in", "components_remove", ":", "pc", ".", "config", ".", "SAVE_LIST", ".", "remove", "(", "item", ")", "#Exclude these elements files from being generated by the simulations", "elements_remove", "=", "[", "#['hydrogen','.ele_H'],", "#['helium','.ele_He'],", "[", "'carbon'", ",", "'.ele_C'", "]", ",", "[", "'nitrogen'", ",", "'.ele_N'", "]", ",", "[", "'oxygen'", ",", "'.ele_O'", "]", ",", "[", "'argon'", ",", "'.ele_Ar'", "]", ",", "[", "'neon'", ",", "'.ele_Ne'", "]", ",", "[", "'sulphur'", ",", "'.ele_S'", "]", ",", "[", "'chlorin'", ",", "'.ele_Cl'", "]", ",", "[", "'iron'", ",", "'.ele_Fe'", "]", ",", "[", "'silicon'", ",", "'.ele_Si'", "]", "]", "for", "item", "in", "elements_remove", ":", "pc", ".", "config", ".", "SAVE_LIST_ELEMS", ".", "remove", "(", "item", ")", "return" ]
This method establishes the output files from pycloudy grids. For these simulations, in which we only want to calculate the emissivities from the hydrogen and helium lines, most of the default files from pycloudy outputs are not required :return:
[ "This", "method", "establishes", "the", "output", "files", "from", "pycloudy", "grids", "For", "these", "simulations", "in", "which", "we", "are", "only", "want", "to", "calculate", "the", "emissivities", "from", "the", "hydrogen", "and", "helium", "lines", "most", "of", "the", "default", "files", "from", "pycloudy", "outputs", "are", "not", "required", ":", "return", ":" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/working_examples/pyCloudy_heliumEmissivities.py#L85-L120
kajala/django-jutil
jutil/format.py
format_full_name
def format_full_name(first_name: str, last_name: str, max_length: int = 20): """ Limits name length to specified length. Tries to keep name as human-readable an natural as possible. :param first_name: First name :param last_name: Last name :param max_length: Maximum length :return: Full name of shortened version depending on length """ # dont allow commas in limited names first_name = first_name.replace(',', ' ') last_name = last_name.replace(',', ' ') # accept short full names as is original_full_name = first_name + ' ' + last_name if len(original_full_name) <= max_length: return original_full_name # drop middle names first_name = first_name.split(' ')[0] full_name = first_name + ' ' + last_name if len(full_name) <= max_length: return full_name # drop latter parts of combined first names first_name = re.split(r'[\s\-]', first_name)[0] full_name = first_name + ' ' + last_name if len(full_name) <= max_length: return full_name # drop latter parts of multi part last names last_name = re.split(r'[\s\-]', last_name)[0] full_name = first_name + ' ' + last_name if len(full_name) <= max_length: return full_name # shorten last name to one letter last_name = last_name[:1] full_name = first_name + ' ' + last_name if len(full_name) > max_length: raise Exception('Failed to shorten name {}'.format(original_full_name)) return full_name
python
def format_full_name(first_name: str, last_name: str, max_length: int = 20): """ Limits name length to specified length. Tries to keep name as human-readable an natural as possible. :param first_name: First name :param last_name: Last name :param max_length: Maximum length :return: Full name of shortened version depending on length """ # dont allow commas in limited names first_name = first_name.replace(',', ' ') last_name = last_name.replace(',', ' ') # accept short full names as is original_full_name = first_name + ' ' + last_name if len(original_full_name) <= max_length: return original_full_name # drop middle names first_name = first_name.split(' ')[0] full_name = first_name + ' ' + last_name if len(full_name) <= max_length: return full_name # drop latter parts of combined first names first_name = re.split(r'[\s\-]', first_name)[0] full_name = first_name + ' ' + last_name if len(full_name) <= max_length: return full_name # drop latter parts of multi part last names last_name = re.split(r'[\s\-]', last_name)[0] full_name = first_name + ' ' + last_name if len(full_name) <= max_length: return full_name # shorten last name to one letter last_name = last_name[:1] full_name = first_name + ' ' + last_name if len(full_name) > max_length: raise Exception('Failed to shorten name {}'.format(original_full_name)) return full_name
[ "def", "format_full_name", "(", "first_name", ":", "str", ",", "last_name", ":", "str", ",", "max_length", ":", "int", "=", "20", ")", ":", "# dont allow commas in limited names", "first_name", "=", "first_name", ".", "replace", "(", "','", ",", "' '", ")", "last_name", "=", "last_name", ".", "replace", "(", "','", ",", "' '", ")", "# accept short full names as is", "original_full_name", "=", "first_name", "+", "' '", "+", "last_name", "if", "len", "(", "original_full_name", ")", "<=", "max_length", ":", "return", "original_full_name", "# drop middle names", "first_name", "=", "first_name", ".", "split", "(", "' '", ")", "[", "0", "]", "full_name", "=", "first_name", "+", "' '", "+", "last_name", "if", "len", "(", "full_name", ")", "<=", "max_length", ":", "return", "full_name", "# drop latter parts of combined first names", "first_name", "=", "re", ".", "split", "(", "r'[\\s\\-]'", ",", "first_name", ")", "[", "0", "]", "full_name", "=", "first_name", "+", "' '", "+", "last_name", "if", "len", "(", "full_name", ")", "<=", "max_length", ":", "return", "full_name", "# drop latter parts of multi part last names", "last_name", "=", "re", ".", "split", "(", "r'[\\s\\-]'", ",", "last_name", ")", "[", "0", "]", "full_name", "=", "first_name", "+", "' '", "+", "last_name", "if", "len", "(", "full_name", ")", "<=", "max_length", ":", "return", "full_name", "# shorten last name to one letter", "last_name", "=", "last_name", "[", ":", "1", "]", "full_name", "=", "first_name", "+", "' '", "+", "last_name", "if", "len", "(", "full_name", ")", ">", "max_length", ":", "raise", "Exception", "(", "'Failed to shorten name {}'", ".", "format", "(", "original_full_name", ")", ")", "return", "full_name" ]
Limits name length to specified length. Tries to keep name as human-readable and natural as possible. :param first_name: First name :param last_name: Last name :param max_length: Maximum length :return: Full name or shortened version depending on length
[ "Limits", "name", "length", "to", "specified", "length", ".", "Tries", "to", "keep", "name", "as", "human", "-", "readable", "an", "natural", "as", "possible", ".", ":", "param", "first_name", ":", "First", "name", ":", "param", "last_name", ":", "Last", "name", ":", "param", "max_length", ":", "Maximum", "length", ":", "return", ":", "Full", "name", "of", "shortened", "version", "depending", "on", "length" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/format.py#L6-L47
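A usage sketch tracing how format_full_name progressively shortens a name; the names are made up and the import path (jutil.format) is inferred from the file location.

from jutil.format import format_full_name

# Short names pass through unchanged.
format_full_name('Jean', 'Dupont')                          # 'Jean Dupont'

# Middle names go first, then the latter parts of hyphenated first names.
format_full_name('Anna-Maria Helena', 'Svensson', 15)       # 'Anna Svensson'

# Multi-part last names are trimmed, and finally the last name becomes an initial.
format_full_name('Konstantinos', 'Papadopoulos-Smith', 16)  # 'Konstantinos P'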
kajala/django-jutil
jutil/format.py
format_timedelta
def format_timedelta(dt: timedelta) -> str: """ Formats timedelta to readable format, e.g. 1h30min. :param dt: timedelta :return: str """ seconds = int(dt.total_seconds()) days, remainder = divmod(seconds, 86400) hours, remainder = divmod(remainder, 3600) minutes, seconds = divmod(remainder, 60) s = "" if days > 0: s += str(days) + "d" if hours > 0: s += str(hours) + "h" if minutes > 0: s += str(minutes) + "min" if s == "": s = "0min" return s
python
def format_timedelta(dt: timedelta) -> str: """ Formats timedelta to readable format, e.g. 1h30min. :param dt: timedelta :return: str """ seconds = int(dt.total_seconds()) days, remainder = divmod(seconds, 86400) hours, remainder = divmod(remainder, 3600) minutes, seconds = divmod(remainder, 60) s = "" if days > 0: s += str(days) + "d" if hours > 0: s += str(hours) + "h" if minutes > 0: s += str(minutes) + "min" if s == "": s = "0min" return s
[ "def", "format_timedelta", "(", "dt", ":", "timedelta", ")", "->", "str", ":", "seconds", "=", "int", "(", "dt", ".", "total_seconds", "(", ")", ")", "days", ",", "remainder", "=", "divmod", "(", "seconds", ",", "86400", ")", "hours", ",", "remainder", "=", "divmod", "(", "remainder", ",", "3600", ")", "minutes", ",", "seconds", "=", "divmod", "(", "remainder", ",", "60", ")", "s", "=", "\"\"", "if", "days", ">", "0", ":", "s", "+=", "str", "(", "days", ")", "+", "\"d\"", "if", "hours", ">", "0", ":", "s", "+=", "str", "(", "hours", ")", "+", "\"h\"", "if", "minutes", ">", "0", ":", "s", "+=", "str", "(", "minutes", ")", "+", "\"min\"", "if", "s", "==", "\"\"", ":", "s", "=", "\"0min\"", "return", "s" ]
Formats timedelta to readable format, e.g. 1h30min. :param dt: timedelta :return: str
[ "Formats", "timedelta", "to", "readable", "format", "e", ".", "g", ".", "1h30min", ".", ":", "param", "dt", ":", "timedelta", ":", "return", ":", "str" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/format.py#L50-L69
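A usage sketch for format_timedelta; the import path is inferred from the file location.

from datetime import timedelta
from jutil.format import format_timedelta

format_timedelta(timedelta(hours=1, minutes=30))   # '1h30min'
format_timedelta(timedelta(days=2, seconds=45))    # '2d' (leftover seconds are dropped)
format_timedelta(timedelta(seconds=30))            # '0min'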
kajala/django-jutil
jutil/format.py
format_xml
def format_xml(xml_str: str, exceptions: bool=False): """ Formats XML document as human-readable plain text. :param xml_str: str (Input XML str) :param exceptions: Raise exceptions on error :return: str (Formatted XML str) """ try: import xml.dom.minidom return xml.dom.minidom.parseString(xml_str).toprettyxml() except Exception: if exceptions: raise return xml_str
python
def format_xml(xml_str: str, exceptions: bool=False): """ Formats XML document as human-readable plain text. :param xml_str: str (Input XML str) :param exceptions: Raise exceptions on error :return: str (Formatted XML str) """ try: import xml.dom.minidom return xml.dom.minidom.parseString(xml_str).toprettyxml() except Exception: if exceptions: raise return xml_str
[ "def", "format_xml", "(", "xml_str", ":", "str", ",", "exceptions", ":", "bool", "=", "False", ")", ":", "try", ":", "import", "xml", ".", "dom", ".", "minidom", "return", "xml", ".", "dom", ".", "minidom", ".", "parseString", "(", "xml_str", ")", ".", "toprettyxml", "(", ")", "except", "Exception", ":", "if", "exceptions", ":", "raise", "return", "xml_str" ]
Formats XML document as human-readable plain text. :param xml_str: str (Input XML str) :param exceptions: Raise exceptions on error :return: str (Formatted XML str)
[ "Formats", "XML", "document", "as", "human", "-", "readable", "plain", "text", ".", ":", "param", "xml_str", ":", "str", "(", "Input", "XML", "str", ")", ":", "param", "exceptions", ":", "Raise", "exceptions", "on", "error", ":", "return", ":", "str", "(", "Formatted", "XML", "str", ")" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/format.py#L72-L85
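A usage sketch for format_xml; the import path is inferred from the file location.

from jutil.format import format_xml

raw = '<root><item id="1">a</item><item id="2">b</item></root>'
print(format_xml(raw))   # pretty-printed, one element per line, with an XML declaration

# Invalid XML is returned unchanged unless exceptions=True is passed.
format_xml('<unclosed>')                   # '<unclosed>'
format_xml('<unclosed>', exceptions=True)  # raises the underlying parse error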
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/samples/desktop_ttk_search_form/isogeo_tk_search_form_py3_async.py
IsogeoSearchForm.worker_allocator
def worker_allocator(self, async_loop, to_do, **kwargs): """ Handler starting the asyncio part. """ d = kwargs threading.Thread( target=self._asyncio_thread, args=(async_loop, to_do, d) ).start()
python
def worker_allocator(self, async_loop, to_do, **kwargs): """ Handler starting the asyncio part. """ d = kwargs threading.Thread( target=self._asyncio_thread, args=(async_loop, to_do, d) ).start()
[ "def", "worker_allocator", "(", "self", ",", "async_loop", ",", "to_do", ",", "*", "*", "kwargs", ")", ":", "d", "=", "kwargs", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_asyncio_thread", ",", "args", "=", "(", "async_loop", ",", "to_do", ",", "d", ")", ")", ".", "start", "(", ")" ]
Handler starting the asyncio part.
[ "Handler", "starting", "the", "asyncio", "part", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/samples/desktop_ttk_search_form/isogeo_tk_search_form_py3_async.py#L266-L271
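A minimal, self-contained sketch of the thread-plus-asyncio pattern that worker_allocator() relies on. The _asyncio_thread body below is an assumption (it is not part of the record above); it simply runs the supplied coroutines on the background event loop so the Tk mainloop is never blocked.

import asyncio
import threading

class WorkerDemo:
    def _asyncio_thread(self, async_loop, to_do, kwargs):
        # Assumed companion method: run every coroutine factory in `to_do` to completion.
        asyncio.set_event_loop(async_loop)
        async def runner():
            await asyncio.gather(*[coro(**kwargs) for coro in to_do])
        async_loop.run_until_complete(runner())

    def worker_allocator(self, async_loop, to_do, **kwargs):
        d = kwargs
        threading.Thread(
            target=self._asyncio_thread, args=(async_loop, to_do, d)
        ).start()

async def fetch(tag):
    await asyncio.sleep(0.1)
    print("done:", tag)

WorkerDemo().worker_allocator(asyncio.new_event_loop(), [fetch], tag="metadata")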
nathankw/pulsarpy
pulsarpy/utils.py
send_mail
def send_mail(form, from_name): """ Sends a mail using the configured mail server for Pulsar. See mailgun documentation at https://documentation.mailgun.com/en/latest/user_manual.html#sending-via-api for specifics. Args: form: `dict`. The mail form fields, i.e. 'to', 'from', ... Returns: `requests.models.Response` instance. Raises: `requests.exceptions.HTTPError`: The status code is not ok. `Exception`: The environment variable MAILGUN_DOMAIN or MAILGUN_API_KEY isn't set. Example:: payload = { "from"="{} <mailgun@{}>".format(from_name, pulsarpy.MAIL_DOMAIN), "subject": "mailgun test", "text": "howdy there", "to": "nathankw@stanford.edu", } send_mail(payload) """ form["from"] = "{} <mailgun@{}>".format(from_name, pulsarpy.MAIL_DOMAIN), if not pulsarpy.MAIL_SERVER_URL: raise Exception("MAILGUN_DOMAIN environment variable not set.") if not pulsarpy.MAIL_AUTH[1]: raise Exception("MAILGUN_API_KEY environment varible not set.") res = requests.post(pulsarpy.MAIL_SERVER_URL, data=form, auth=pulsarpy.MAIL_AUTH) res.raise_for_status() return res
python
def send_mail(form, from_name): """ Sends a mail using the configured mail server for Pulsar. See mailgun documentation at https://documentation.mailgun.com/en/latest/user_manual.html#sending-via-api for specifics. Args: form: `dict`. The mail form fields, i.e. 'to', 'from', ... Returns: `requests.models.Response` instance. Raises: `requests.exceptions.HTTPError`: The status code is not ok. `Exception`: The environment variable MAILGUN_DOMAIN or MAILGUN_API_KEY isn't set. Example:: payload = { "from"="{} <mailgun@{}>".format(from_name, pulsarpy.MAIL_DOMAIN), "subject": "mailgun test", "text": "howdy there", "to": "nathankw@stanford.edu", } send_mail(payload) """ form["from"] = "{} <mailgun@{}>".format(from_name, pulsarpy.MAIL_DOMAIN), if not pulsarpy.MAIL_SERVER_URL: raise Exception("MAILGUN_DOMAIN environment variable not set.") if not pulsarpy.MAIL_AUTH[1]: raise Exception("MAILGUN_API_KEY environment varible not set.") res = requests.post(pulsarpy.MAIL_SERVER_URL, data=form, auth=pulsarpy.MAIL_AUTH) res.raise_for_status() return res
[ "def", "send_mail", "(", "form", ",", "from_name", ")", ":", "form", "[", "\"from\"", "]", "=", "\"{} <mailgun@{}>\"", ".", "format", "(", "from_name", ",", "pulsarpy", ".", "MAIL_DOMAIN", ")", ",", "if", "not", "pulsarpy", ".", "MAIL_SERVER_URL", ":", "raise", "Exception", "(", "\"MAILGUN_DOMAIN environment variable not set.\"", ")", "if", "not", "pulsarpy", ".", "MAIL_AUTH", "[", "1", "]", ":", "raise", "Exception", "(", "\"MAILGUN_API_KEY environment varible not set.\"", ")", "res", "=", "requests", ".", "post", "(", "pulsarpy", ".", "MAIL_SERVER_URL", ",", "data", "=", "form", ",", "auth", "=", "pulsarpy", ".", "MAIL_AUTH", ")", "res", ".", "raise_for_status", "(", ")", "return", "res" ]
Sends a mail using the configured mail server for Pulsar. See mailgun documentation at https://documentation.mailgun.com/en/latest/user_manual.html#sending-via-api for specifics. Args: form: `dict`. The mail form fields, i.e. 'to', 'from', ... Returns: `requests.models.Response` instance. Raises: `requests.exceptions.HTTPError`: The status code is not ok. `Exception`: The environment variable MAILGUN_DOMAIN or MAILGUN_API_KEY isn't set. Example:: payload = { "from"="{} <mailgun@{}>".format(from_name, pulsarpy.MAIL_DOMAIN), "subject": "mailgun test", "text": "howdy there", "to": "nathankw@stanford.edu", } send_mail(payload)
[ "Sends", "a", "mail", "using", "the", "configured", "mail", "server", "for", "Pulsar", ".", "See", "mailgun", "documentation", "at", "https", ":", "//", "documentation", ".", "mailgun", ".", "com", "/", "en", "/", "latest", "/", "user_manual", ".", "html#sending", "-", "via", "-", "api", "for", "specifics", ".", "Args", ":", "form", ":", "dict", ".", "The", "mail", "form", "fields", "i", ".", "e", ".", "to", "from", "..." ]
train
https://github.com/nathankw/pulsarpy/blob/359b040c0f2383b88c0b5548715fefd35f5f634c/pulsarpy/utils.py#L15-L48
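A usage sketch for send_mail; it assumes the MAILGUN_DOMAIN and MAILGUN_API_KEY environment variables are configured for pulsarpy, and the recipient address is hypothetical.

from pulsarpy.utils import send_mail

form = {
    "to": "someone@example.com",
    "subject": "sequencing run complete",
    "text": "Results are ready for review.",
}
# send_mail() fills in the "from" field itself using from_name and the
# configured mail domain, then POSTs the form to the Mailgun API.
response = send_mail(form, from_name="Pulsar LIMS")
print(response.status_code)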
nathankw/pulsarpy
pulsarpy/utils.py
get_exp_of_biosample
def get_exp_of_biosample(biosample_rec): """ Determines whether the biosample is part of a ChipseqExperiment or SingleCellSorting Experiment, and if so, returns the associated experiment as a models.Model instance that is one of those two classes. The biosample is determined to be part of a ChipseqExperiment if the Biosample.chipseq_experiment_id attribute is set, meaning that the biosample can be associated to the ChipseqExperiment as a replicate via any of of the following ChipseqExperiment attributes: ChipseqExperiment.replicates ChipseqExperiment.control_replicates The biosample will be determined to be part of a SingleCellSorting experiment if the Biosample.sorting_biosample_single_cell_sorting attribute is set, meaning that it is the SingleCellSorting.sorting_biosample. Args: biosample_rec: `dict`. A Biosample record as returned by instantiating `models.Biosample`. Raises: `Exception`: An experiment is not associated to this biosample. """ chip_exp_id = biosample_rec.chipseq_experiment_id ssc_id = biosample_rec.sorting_biosample_single_cell_sorting_id if chip_exp_id: return {"type": "chipseq_experiment", "record": models.ChipseqExperiment(chip_exp_id)} elif ssc_id: return {"type": "single_cell_sorting", "record": models.SingleCellSorting(ssc_id)} raise Exception("Biosample {} is not on an experiment.".format(biosample_rec["id"]))
python
def get_exp_of_biosample(biosample_rec): """ Determines whether the biosample is part of a ChipseqExperiment or SingleCellSorting Experiment, and if so, returns the associated experiment as a models.Model instance that is one of those two classes. The biosample is determined to be part of a ChipseqExperiment if the Biosample.chipseq_experiment_id attribute is set, meaning that the biosample can be associated to the ChipseqExperiment as a replicate via any of of the following ChipseqExperiment attributes: ChipseqExperiment.replicates ChipseqExperiment.control_replicates The biosample will be determined to be part of a SingleCellSorting experiment if the Biosample.sorting_biosample_single_cell_sorting attribute is set, meaning that it is the SingleCellSorting.sorting_biosample. Args: biosample_rec: `dict`. A Biosample record as returned by instantiating `models.Biosample`. Raises: `Exception`: An experiment is not associated to this biosample. """ chip_exp_id = biosample_rec.chipseq_experiment_id ssc_id = biosample_rec.sorting_biosample_single_cell_sorting_id if chip_exp_id: return {"type": "chipseq_experiment", "record": models.ChipseqExperiment(chip_exp_id)} elif ssc_id: return {"type": "single_cell_sorting", "record": models.SingleCellSorting(ssc_id)} raise Exception("Biosample {} is not on an experiment.".format(biosample_rec["id"]))
[ "def", "get_exp_of_biosample", "(", "biosample_rec", ")", ":", "chip_exp_id", "=", "biosample_rec", ".", "chipseq_experiment_id", "ssc_id", "=", "biosample_rec", ".", "sorting_biosample_single_cell_sorting_id", "if", "chip_exp_id", ":", "return", "{", "\"type\"", ":", "\"chipseq_experiment\"", ",", "\"record\"", ":", "models", ".", "ChipseqExperiment", "(", "chip_exp_id", ")", "}", "elif", "ssc_id", ":", "return", "{", "\"type\"", ":", "\"single_cell_sorting\"", ",", "\"record\"", ":", "models", ".", "SingleCellSorting", "(", "ssc_id", ")", "}", "raise", "Exception", "(", "\"Biosample {} is not on an experiment.\"", ".", "format", "(", "biosample_rec", "[", "\"id\"", "]", ")", ")" ]
Determines whether the biosample is part of a ChipseqExperiment or SingleCellSorting Experiment, and if so, returns the associated experiment as a models.Model instance that is one of those two classes. The biosample is determined to be part of a ChipseqExperiment if the Biosample.chipseq_experiment_id attribute is set, meaning that the biosample can be associated to the ChipseqExperiment as a replicate via any of the following ChipseqExperiment attributes: ChipseqExperiment.replicates ChipseqExperiment.control_replicates The biosample will be determined to be part of a SingleCellSorting experiment if the Biosample.sorting_biosample_single_cell_sorting attribute is set, meaning that it is the SingleCellSorting.sorting_biosample. Args: biosample_rec: `dict`. A Biosample record as returned by instantiating `models.Biosample`. Raises: `Exception`: An experiment is not associated to this biosample.
[ "Determines", "whether", "the", "biosample", "is", "part", "of", "a", "ChipseqExperiment", "or", "SingleCellSorting", "Experiment", "and", "if", "so", "returns", "the", "associated", "experiment", "as", "a", "models", ".", "Model", "instance", "that", "is", "one", "of", "those", "two", "classes", ".", "The", "biosample", "is", "determined", "to", "be", "part", "of", "a", "ChipseqExperiment", "if", "the", "Biosample", ".", "chipseq_experiment_id", "attribute", "is", "set", "meaning", "that", "the", "biosample", "can", "be", "associated", "to", "the", "ChipseqExperiment", "as", "a", "replicate", "via", "any", "of", "of", "the", "following", "ChipseqExperiment", "attributes", ":" ]
train
https://github.com/nathankw/pulsarpy/blob/359b040c0f2383b88c0b5548715fefd35f5f634c/pulsarpy/utils.py#L57-L84
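A usage sketch for get_exp_of_biosample; it assumes a configured pulsarpy connection, that models.Biosample can be instantiated by record ID (as the ChipseqExperiment/SingleCellSorting instantiations in the record suggest), and the ID below is made up.

from pulsarpy import models
from pulsarpy.utils import get_exp_of_biosample

biosample = models.Biosample(123)   # hypothetical record ID
exp = get_exp_of_biosample(biosample)

if exp["type"] == "chipseq_experiment":
    print("Part of a ChIP-seq experiment:", exp["record"])
else:
    print("Sorting biosample of a single-cell sorting experiment:", exp["record"])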
klmitch/cli_tools
cli_tools.py
_clean_text
def _clean_text(text): """ Clean up a multiple-line, potentially multiple-paragraph text string. This is used to extract the first paragraph of a string and eliminate line breaks and indentation. Lines will be joined together by a single space. :param text: The text string to clean up. It is safe to pass ``None``. :returns: The first paragraph, cleaned up as described above. """ desc = [] for line in (text or '').strip().split('\n'): # Clean up the line... line = line.strip() # We only want the first paragraph if not line: break desc.append(line) return ' '.join(desc)
python
def _clean_text(text): """ Clean up a multiple-line, potentially multiple-paragraph text string. This is used to extract the first paragraph of a string and eliminate line breaks and indentation. Lines will be joined together by a single space. :param text: The text string to clean up. It is safe to pass ``None``. :returns: The first paragraph, cleaned up as described above. """ desc = [] for line in (text or '').strip().split('\n'): # Clean up the line... line = line.strip() # We only want the first paragraph if not line: break desc.append(line) return ' '.join(desc)
[ "def", "_clean_text", "(", "text", ")", ":", "desc", "=", "[", "]", "for", "line", "in", "(", "text", "or", "''", ")", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", ":", "# Clean up the line...", "line", "=", "line", ".", "strip", "(", ")", "# We only want the first paragraph", "if", "not", "line", ":", "break", "desc", ".", "append", "(", "line", ")", "return", "' '", ".", "join", "(", "desc", ")" ]
Clean up a multiple-line, potentially multiple-paragraph text string. This is used to extract the first paragraph of a string and eliminate line breaks and indentation. Lines will be joined together by a single space. :param text: The text string to clean up. It is safe to pass ``None``. :returns: The first paragraph, cleaned up as described above.
[ "Clean", "up", "a", "multiple", "-", "line", "potentially", "multiple", "-", "paragraph", "text", "string", ".", "This", "is", "used", "to", "extract", "the", "first", "paragraph", "of", "a", "string", "and", "eliminate", "line", "breaks", "and", "indentation", ".", "Lines", "will", "be", "joined", "together", "by", "a", "single", "space", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L28-L52
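A usage sketch for _clean_text; it is a module-private helper, imported here directly from the cli_tools module for illustration.

from cli_tools import _clean_text

doc = """
    Summarize the build results
    and exit non-zero on failure.

    Extra details that belong to a second paragraph.
"""

_clean_text(doc)   # 'Summarize the build results and exit non-zero on failure.'
_clean_text(None)  # ''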
klmitch/cli_tools
cli_tools.py
prog
def prog(text): """ Decorator used to specify the program name for the console script help message. :param text: The text to use for the program name. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.prog = text return func return decorator
python
def prog(text): """ Decorator used to specify the program name for the console script help message. :param text: The text to use for the program name. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.prog = text return func return decorator
[ "def", "prog", "(", "text", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "prog", "=", "text", "return", "func", "return", "decorator" ]
Decorator used to specify the program name for the console script help message. :param text: The text to use for the program name.
[ "Decorator", "used", "to", "specify", "the", "program", "name", "for", "the", "console", "script", "help", "message", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L693-L705
klmitch/cli_tools
cli_tools.py
usage
def usage(text): """ Decorator used to specify a usage string for the console script help message. :param text: The text to use for the usage. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.usage = text return func return decorator
python
def usage(text): """ Decorator used to specify a usage string for the console script help message. :param text: The text to use for the usage. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.usage = text return func return decorator
[ "def", "usage", "(", "text", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "usage", "=", "text", "return", "func", "return", "decorator" ]
Decorator used to specify a usage string for the console script help message. :param text: The text to use for the usage.
[ "Decorator", "used", "to", "specify", "a", "usage", "string", "for", "the", "console", "script", "help", "message", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L708-L720
klmitch/cli_tools
cli_tools.py
description
def description(text): """ Decorator used to specify a short description of the console script. This can be used to override the default, which is derived from the docstring of the function. :param text: The text to use for the description. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.description = text return func return decorator
python
def description(text): """ Decorator used to specify a short description of the console script. This can be used to override the default, which is derived from the docstring of the function. :param text: The text to use for the description. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.description = text return func return decorator
[ "def", "description", "(", "text", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "description", "=", "text", "return", "func", "return", "decorator" ]
Decorator used to specify a short description of the console script. This can be used to override the default, which is derived from the docstring of the function. :param text: The text to use for the description.
[ "Decorator", "used", "to", "specify", "a", "short", "description", "of", "the", "console", "script", ".", "This", "can", "be", "used", "to", "override", "the", "default", "which", "is", "derived", "from", "the", "docstring", "of", "the", "function", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L723-L736
klmitch/cli_tools
cli_tools.py
epilog
def epilog(text): """ Decorator used to specify an epilog for the console script help message. :param text: The text to use for the epilog. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.epilog = text return func return decorator
python
def epilog(text): """ Decorator used to specify an epilog for the console script help message. :param text: The text to use for the epilog. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.epilog = text return func return decorator
[ "def", "epilog", "(", "text", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "epilog", "=", "text", "return", "func", "return", "decorator" ]
Decorator used to specify an epilog for the console script help message. :param text: The text to use for the epilog.
[ "Decorator", "used", "to", "specify", "an", "epilog", "for", "the", "console", "script", "help", "message", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L739-L751
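The three help-text decorators in the records above (@usage, @description, @epilog), like @prog, only attach strings to the function's adaptor, so they can be stacked on the same function. A hedged sketch; the option strings, function name, and import path are assumptions for illustration:

import cli_tools

@cli_tools.usage('%(prog)s [options] NAME')
@cli_tools.description('Say hello to NAME on standard output.')
@cli_tools.epilog('Report bugs at the project issue tracker.')
def hello(name):
    # Each decorator records its text and hands back the same function.
    print('Hello, %s!' % name)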
klmitch/cli_tools
cli_tools.py
formatter_class
def formatter_class(klass): """ Decorator used to specify the formatter class for the console script. :param klass: The formatter class to use. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.formatter_class = klass return func return decorator
python
def formatter_class(klass): """ Decorator used to specify the formatter class for the console script. :param klass: The formatter class to use. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.formatter_class = klass return func return decorator
[ "def", "formatter_class", "(", "klass", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "formatter_class", "=", "klass", "return", "func", "return", "decorator" ]
Decorator used to specify the formatter class for the console script. :param klass: The formatter class to use.
[ "Decorator", "used", "to", "specify", "the", "formatter", "class", "for", "the", "console", "script", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L754-L766
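Since the class given to @formatter_class is handed to argparse when the parser is built, any standard argparse help formatter should be usable here. A sketch under that assumption; the function and text are invented:

import argparse

import cli_tools

@cli_tools.formatter_class(argparse.RawDescriptionHelpFormatter)
@cli_tools.description('A description whose\nline breaks should be preserved in --help output.')
def report():
    pass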
klmitch/cli_tools
cli_tools.py
argument
def argument(*args, **kwargs): """ Decorator used to specify an argument taken by the console script. Positional and keyword arguments have the same meaning as those given to ``argparse.ArgumentParser.add_argument()``. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) group = kwargs.pop('group', None) adaptor._add_argument(args, kwargs, group=group) return func return decorator
python
def argument(*args, **kwargs): """ Decorator used to specify an argument taken by the console script. Positional and keyword arguments have the same meaning as those given to ``argparse.ArgumentParser.add_argument()``. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) group = kwargs.pop('group', None) adaptor._add_argument(args, kwargs, group=group) return func return decorator
[ "def", "argument", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "group", "=", "kwargs", ".", "pop", "(", "'group'", ",", "None", ")", "adaptor", ".", "_add_argument", "(", "args", ",", "kwargs", ",", "group", "=", "group", ")", "return", "func", "return", "decorator" ]
Decorator used to specify an argument taken by the console script. Positional and keyword arguments have the same meaning as those given to ``argparse.ArgumentParser.add_argument()``.
[ "Decorator", "used", "to", "specify", "an", "argument", "taken", "by", "the", "console", "script", ".", "Positional", "and", "keyword", "arguments", "have", "the", "same", "meaning", "as", "those", "given", "to", "argparse", ".", "ArgumentParser", ".", "add_argument", "()", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L769-L781
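Because @argument forwards its positional and keyword arguments directly to argparse.ArgumentParser.add_argument(), the usual argparse idioms carry over. A hedged sketch; the argument names are illustrative, and the way parsed values are passed into the function is not covered by this record:

import cli_tools

@cli_tools.argument('path', help='file to process')
@cli_tools.argument('--verbose', '-v', action='store_true', help='enable chatty output')
def process(path, verbose=False):
    if verbose:
        print('processing %s' % path)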
klmitch/cli_tools
cli_tools.py
argument_group
def argument_group(group, **kwargs): """ Decorator used to specify an argument group. Keyword arguments have the same meaning as those given to ``argparse.ArgumentParser.add_argument_group()``. Arguments may be placed in a given argument group by passing the ``group`` keyword argument to @argument(). :param group: The name of the argument group. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor._add_group(group, 'group', kwargs) return func return decorator
python
def argument_group(group, **kwargs): """ Decorator used to specify an argument group. Keyword arguments have the same meaning as those given to ``argparse.ArgumentParser.add_argument_group()``. Arguments may be placed in a given argument group by passing the ``group`` keyword argument to @argument(). :param group: The name of the argument group. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor._add_group(group, 'group', kwargs) return func return decorator
[ "def", "argument_group", "(", "group", ",", "*", "*", "kwargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "_add_group", "(", "group", ",", "'group'", ",", "kwargs", ")", "return", "func", "return", "decorator" ]
Decorator used to specify an argument group. Keyword arguments have the same meaning as those given to ``argparse.ArgumentParser.add_argument_group()``. Arguments may be placed in a given argument group by passing the ``group`` keyword argument to @argument(). :param group: The name of the argument group.
[ "Decorator", "used", "to", "specify", "an", "argument", "group", ".", "Keyword", "arguments", "have", "the", "same", "meaning", "as", "those", "given", "to", "argparse", ".", "ArgumentParser", ".", "add_argument_group", "()", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L784-L800
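A sketch of how @argument_group and the group keyword of @argument described above might be combined. The group name, title, and options are invented, and the exact point at which group references are resolved is an assumption not spelled out in this record:

import cli_tools

@cli_tools.argument_group('output', title='Output options')
@cli_tools.argument('--json', action='store_true', group='output', help='emit JSON')
@cli_tools.argument('--quiet', action='store_true', group='output', help='suppress normal output')
def export(json=False, quiet=False):
    pass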
klmitch/cli_tools
cli_tools.py
subparsers
def subparsers(**kwargs): """ Decorator used to specify alternate keyword arguments to pass to the ``argparse.ArgumentParser.add_subparsers()`` call. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.subkwargs = kwargs adaptor.do_subs = True return func return decorator
python
def subparsers(**kwargs): """ Decorator used to specify alternate keyword arguments to pass to the ``argparse.ArgumentParser.add_subparsers()`` call. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor.subkwargs = kwargs adaptor.do_subs = True return func return decorator
[ "def", "subparsers", "(", "*", "*", "kwargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "subkwargs", "=", "kwargs", "adaptor", ".", "do_subs", "=", "True", "return", "func", "return", "decorator" ]
Decorator used to specify alternate keyword arguments to pass to the ``argparse.ArgumentParser.add_subparsers()`` call.
[ "Decorator", "used", "to", "specify", "alternate", "keyword", "arguments", "to", "pass", "to", "the", "argparse", ".", "ArgumentParser", ".", "add_subparsers", "()", "call", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L822-L833
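The keyword arguments captured by @subparsers are later forwarded to argparse.ArgumentParser.add_subparsers(), so standard add_subparsers() keywords such as title or metavar should apply. A sketch under that assumption; the function is invented:

import cli_tools

@cli_tools.subparsers(title='subcommands', metavar='COMMAND')
def cli():
    """Top-level entry point whose subcommands are added separately."""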
klmitch/cli_tools
cli_tools.py
load_subcommands
def load_subcommands(group): """ Decorator used to load subcommands from a given ``pkg_resources`` entrypoint group. Each function must be appropriately decorated with the ``cli_tools`` decorators to be considered an extension. :param group: The name of the ``pkg_resources`` entrypoint group. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor._add_extensions(group) return func return decorator
python
def load_subcommands(group): """ Decorator used to load subcommands from a given ``pkg_resources`` entrypoint group. Each function must be appropriately decorated with the ``cli_tools`` decorators to be considered an extension. :param group: The name of the ``pkg_resources`` entrypoint group. """ def decorator(func): adaptor = ScriptAdaptor._get_adaptor(func) adaptor._add_extensions(group) return func return decorator
[ "def", "load_subcommands", "(", "group", ")", ":", "def", "decorator", "(", "func", ")", ":", "adaptor", "=", "ScriptAdaptor", ".", "_get_adaptor", "(", "func", ")", "adaptor", ".", "_add_extensions", "(", "group", ")", "return", "func", "return", "decorator" ]
Decorator used to load subcommands from a given ``pkg_resources`` entrypoint group. Each function must be appropriately decorated with the ``cli_tools`` decorators to be considered an extension. :param group: The name of the ``pkg_resources`` entrypoint group.
[ "Decorator", "used", "to", "load", "subcommands", "from", "a", "given", "pkg_resources", "entrypoint", "group", ".", "Each", "function", "must", "be", "appropriately", "decorated", "with", "the", "cli_tools", "decorators", "to", "be", "considered", "an", "extension", "." ]
train
https://github.com/klmitch/cli_tools/blob/3f9b5fd8d7458a402b3999618644ffa419d8a946/cli_tools.py#L836-L849
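A sketch of @load_subcommands; the entry point group name 'mytool.subcommands' is hypothetical, and the mechanics of registering those entry points in a package's setup metadata are outside this record:

import cli_tools

@cli_tools.load_subcommands('mytool.subcommands')  # hypothetical pkg_resources group
def main():
    """Root command; decorated subcommand functions are discovered from the group."""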
pmacosta/ptrie
ptrie/ptrie.py
Trie._collapse_subtree
def _collapse_subtree(self, name, recursive=True): """Collapse a sub-tree.""" oname = name children = self._db[name]["children"] data = self._db[name]["data"] del_list = [] while (len(children) == 1) and (not data): del_list.append(name) name = children[0] children = self._db[name]["children"] data = self._db[name]["data"] parent = self._db[oname]["parent"] self._db[name]["parent"] = parent if parent: self._db[parent]["children"].remove(oname) self._db[parent]["children"] = sorted(self._db[parent]["children"] + [name]) else: self._root = name self._root_hierarchy_length = len( self.root_name.split(self._node_separator) ) for node in del_list: self._del_node(node) if recursive: for child in copy.copy(children): self._collapse_subtree(child)
python
def _collapse_subtree(self, name, recursive=True): """Collapse a sub-tree.""" oname = name children = self._db[name]["children"] data = self._db[name]["data"] del_list = [] while (len(children) == 1) and (not data): del_list.append(name) name = children[0] children = self._db[name]["children"] data = self._db[name]["data"] parent = self._db[oname]["parent"] self._db[name]["parent"] = parent if parent: self._db[parent]["children"].remove(oname) self._db[parent]["children"] = sorted(self._db[parent]["children"] + [name]) else: self._root = name self._root_hierarchy_length = len( self.root_name.split(self._node_separator) ) for node in del_list: self._del_node(node) if recursive: for child in copy.copy(children): self._collapse_subtree(child)
[ "def", "_collapse_subtree", "(", "self", ",", "name", ",", "recursive", "=", "True", ")", ":", "oname", "=", "name", "children", "=", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]", "data", "=", "self", ".", "_db", "[", "name", "]", "[", "\"data\"", "]", "del_list", "=", "[", "]", "while", "(", "len", "(", "children", ")", "==", "1", ")", "and", "(", "not", "data", ")", ":", "del_list", ".", "append", "(", "name", ")", "name", "=", "children", "[", "0", "]", "children", "=", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]", "data", "=", "self", ".", "_db", "[", "name", "]", "[", "\"data\"", "]", "parent", "=", "self", ".", "_db", "[", "oname", "]", "[", "\"parent\"", "]", "self", ".", "_db", "[", "name", "]", "[", "\"parent\"", "]", "=", "parent", "if", "parent", ":", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", ".", "remove", "(", "oname", ")", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "=", "sorted", "(", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "+", "[", "name", "]", ")", "else", ":", "self", ".", "_root", "=", "name", "self", ".", "_root_hierarchy_length", "=", "len", "(", "self", ".", "root_name", ".", "split", "(", "self", ".", "_node_separator", ")", ")", "for", "node", "in", "del_list", ":", "self", ".", "_del_node", "(", "node", ")", "if", "recursive", ":", "for", "child", "in", "copy", ".", "copy", "(", "children", ")", ":", "self", ".", "_collapse_subtree", "(", "child", ")" ]
Collapse a sub-tree.
[ "Collapse", "a", "sub", "-", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L127-L152
pmacosta/ptrie
ptrie/ptrie.py
Trie._create_intermediate_nodes
def _create_intermediate_nodes(self, name): """Create intermediate nodes if hierarchy does not exist.""" hierarchy = self._split_node_name(name, self.root_name) node_tree = [ self.root_name + self._node_separator + self._node_separator.join(hierarchy[: num + 1]) for num in range(len(hierarchy)) ] iobj = [ (child[: child.rfind(self._node_separator)], child) for child in node_tree if child not in self._db ] for parent, child in iobj: self._db[child] = {"parent": parent, "children": [], "data": []} self._db[parent]["children"] = sorted( self._db[parent]["children"] + [child] )
python
def _create_intermediate_nodes(self, name): """Create intermediate nodes if hierarchy does not exist.""" hierarchy = self._split_node_name(name, self.root_name) node_tree = [ self.root_name + self._node_separator + self._node_separator.join(hierarchy[: num + 1]) for num in range(len(hierarchy)) ] iobj = [ (child[: child.rfind(self._node_separator)], child) for child in node_tree if child not in self._db ] for parent, child in iobj: self._db[child] = {"parent": parent, "children": [], "data": []} self._db[parent]["children"] = sorted( self._db[parent]["children"] + [child] )
[ "def", "_create_intermediate_nodes", "(", "self", ",", "name", ")", ":", "hierarchy", "=", "self", ".", "_split_node_name", "(", "name", ",", "self", ".", "root_name", ")", "node_tree", "=", "[", "self", ".", "root_name", "+", "self", ".", "_node_separator", "+", "self", ".", "_node_separator", ".", "join", "(", "hierarchy", "[", ":", "num", "+", "1", "]", ")", "for", "num", "in", "range", "(", "len", "(", "hierarchy", ")", ")", "]", "iobj", "=", "[", "(", "child", "[", ":", "child", ".", "rfind", "(", "self", ".", "_node_separator", ")", "]", ",", "child", ")", "for", "child", "in", "node_tree", "if", "child", "not", "in", "self", ".", "_db", "]", "for", "parent", ",", "child", "in", "iobj", ":", "self", ".", "_db", "[", "child", "]", "=", "{", "\"parent\"", ":", "parent", ",", "\"children\"", ":", "[", "]", ",", "\"data\"", ":", "[", "]", "}", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "=", "sorted", "(", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "+", "[", "child", "]", ")" ]
Create intermediate nodes if hierarchy does not exist.
[ "Create", "intermediate", "nodes", "if", "hierarchy", "does", "not", "exist", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L154-L172
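To make the name expansion in _create_intermediate_nodes above concrete, a standalone sketch that spells out every ancestor level of a new node name. The separator, root, and node names are invented; this mirrors the node_tree comprehension for a simple case rather than calling the library:

# Expand 'root.a.b.c' into the chain of nodes that may need to be created.
sep = '.'
root = 'root'
name = 'root.a.b.c'
hierarchy = name[len(root) + 1:].split(sep)  # ['a', 'b', 'c']
node_tree = [
    root + sep + sep.join(hierarchy[:num + 1]) for num in range(len(hierarchy))
]
print(node_tree)  # ['root.a', 'root.a.b', 'root.a.b.c']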
pmacosta/ptrie
ptrie/ptrie.py
Trie._create_node
def _create_node(self, name, parent, children, data): """Create new tree node.""" self._db[name] = {"parent": parent, "children": children, "data": data}
python
def _create_node(self, name, parent, children, data): """Create new tree node.""" self._db[name] = {"parent": parent, "children": children, "data": data}
[ "def", "_create_node", "(", "self", ",", "name", ",", "parent", ",", "children", ",", "data", ")", ":", "self", ".", "_db", "[", "name", "]", "=", "{", "\"parent\"", ":", "parent", ",", "\"children\"", ":", "children", ",", "\"data\"", ":", "data", "}" ]
Create new tree node.
[ "Create", "new", "tree", "node", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L174-L176
pmacosta/ptrie
ptrie/ptrie.py
Trie._delete_subtree
def _delete_subtree(self, nodes): """ Delete subtree private method. No argument validation and usage of getter/setter private methods is used for speed """ nodes = nodes if isinstance(nodes, list) else [nodes] iobj = [ (self._db[node]["parent"], node) for node in nodes if self._node_name_in_tree(node) ] for parent, node in iobj: # Delete link to parent (if not root node) del_list = self._get_subtree(node) if parent: self._db[parent]["children"].remove(node) # Delete children (sub-tree) for child in del_list: del self._db[child] if self._empty_tree(): self._root = None self._root_hierarchy_length = None
python
def _delete_subtree(self, nodes): """ Delete subtree private method. No argument validation and usage of getter/setter private methods is used for speed """ nodes = nodes if isinstance(nodes, list) else [nodes] iobj = [ (self._db[node]["parent"], node) for node in nodes if self._node_name_in_tree(node) ] for parent, node in iobj: # Delete link to parent (if not root node) del_list = self._get_subtree(node) if parent: self._db[parent]["children"].remove(node) # Delete children (sub-tree) for child in del_list: del self._db[child] if self._empty_tree(): self._root = None self._root_hierarchy_length = None
[ "def", "_delete_subtree", "(", "self", ",", "nodes", ")", ":", "nodes", "=", "nodes", "if", "isinstance", "(", "nodes", ",", "list", ")", "else", "[", "nodes", "]", "iobj", "=", "[", "(", "self", ".", "_db", "[", "node", "]", "[", "\"parent\"", "]", ",", "node", ")", "for", "node", "in", "nodes", "if", "self", ".", "_node_name_in_tree", "(", "node", ")", "]", "for", "parent", ",", "node", "in", "iobj", ":", "# Delete link to parent (if not root node)", "del_list", "=", "self", ".", "_get_subtree", "(", "node", ")", "if", "parent", ":", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", ".", "remove", "(", "node", ")", "# Delete children (sub-tree)", "for", "child", "in", "del_list", ":", "del", "self", ".", "_db", "[", "child", "]", "if", "self", ".", "_empty_tree", "(", ")", ":", "self", ".", "_root", "=", "None", "self", ".", "_root_hierarchy_length", "=", "None" ]
Delete subtree private method. No argument validation and usage of getter/setter private methods is used for speed
[ "Delete", "subtree", "private", "method", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L190-L213
pmacosta/ptrie
ptrie/ptrie.py
Trie._find_common_prefix
def _find_common_prefix(self, node1, node2): """Find common prefix between two nodes.""" tokens1 = [item.strip() for item in node1.split(self.node_separator)] tokens2 = [item.strip() for item in node2.split(self.node_separator)] ret = [] for token1, token2 in zip(tokens1, tokens2): if token1 == token2: ret.append(token1) else: break return self.node_separator.join(ret)
python
def _find_common_prefix(self, node1, node2): """Find common prefix between two nodes.""" tokens1 = [item.strip() for item in node1.split(self.node_separator)] tokens2 = [item.strip() for item in node2.split(self.node_separator)] ret = [] for token1, token2 in zip(tokens1, tokens2): if token1 == token2: ret.append(token1) else: break return self.node_separator.join(ret)
[ "def", "_find_common_prefix", "(", "self", ",", "node1", ",", "node2", ")", ":", "tokens1", "=", "[", "item", ".", "strip", "(", ")", "for", "item", "in", "node1", ".", "split", "(", "self", ".", "node_separator", ")", "]", "tokens2", "=", "[", "item", ".", "strip", "(", ")", "for", "item", "in", "node2", ".", "split", "(", "self", ".", "node_separator", ")", "]", "ret", "=", "[", "]", "for", "token1", ",", "token2", "in", "zip", "(", "tokens1", ",", "tokens2", ")", ":", "if", "token1", "==", "token2", ":", "ret", ".", "append", "(", "token1", ")", "else", ":", "break", "return", "self", ".", "node_separator", ".", "join", "(", "ret", ")" ]
Find common prefix between two nodes.
[ "Find", "common", "prefix", "between", "two", "nodes", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L223-L233
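A standalone illustration of the prefix walk in _find_common_prefix above: hierarchy tokens are compared pairwise and collected until the first mismatch. The node names are invented:

sep = '.'
node1 = 'root.branch1.leaf1'
node2 = 'root.branch1.leaf2'
common = []
for token1, token2 in zip(node1.split(sep), node2.split(sep)):
    if token1 != token2:
        break
    common.append(token1)
print(sep.join(common))  # root.branch1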
pmacosta/ptrie
ptrie/ptrie.py
Trie._prt
def _prt(self, name, lparent, sep, pre1, pre2): """ Print a row (leaf) of tree. :param name: Full node name :type name: string :param lparent: Position in full node name of last separator before node to be printed :type lparent: integer :param pre1: Connector next to node name, either a null character if the node to print is the root node, a right angle if node name to be printed is a leaf or a rotated "T" if the node name to be printed is one of many children :type pre1: string """ # pylint: disable=R0914 nname = name[lparent + 1 :] children = self._db[name]["children"] ncmu = len(children) - 1 plst1 = ncmu * [self._vertical_and_right] + [self._up_and_right] plst2 = ncmu * [self._vertical] + [" "] slist = (ncmu + 1) * [sep + pre2] dmark = " (*)" if self._db[name]["data"] else "" return "\n".join( [ u"{sep}{connector}{name}{dmark}".format( sep=sep, connector=pre1, name=nname, dmark=dmark ) ] + [ self._prt(child, len(name), sep=schar, pre1=p1, pre2=p2) for child, p1, p2, schar in zip(children, plst1, plst2, slist) ] )
python
def _prt(self, name, lparent, sep, pre1, pre2): """ Print a row (leaf) of tree. :param name: Full node name :type name: string :param lparent: Position in full node name of last separator before node to be printed :type lparent: integer :param pre1: Connector next to node name, either a null character if the node to print is the root node, a right angle if node name to be printed is a leaf or a rotated "T" if the node name to be printed is one of many children :type pre1: string """ # pylint: disable=R0914 nname = name[lparent + 1 :] children = self._db[name]["children"] ncmu = len(children) - 1 plst1 = ncmu * [self._vertical_and_right] + [self._up_and_right] plst2 = ncmu * [self._vertical] + [" "] slist = (ncmu + 1) * [sep + pre2] dmark = " (*)" if self._db[name]["data"] else "" return "\n".join( [ u"{sep}{connector}{name}{dmark}".format( sep=sep, connector=pre1, name=nname, dmark=dmark ) ] + [ self._prt(child, len(name), sep=schar, pre1=p1, pre2=p2) for child, p1, p2, schar in zip(children, plst1, plst2, slist) ] )
[ "def", "_prt", "(", "self", ",", "name", ",", "lparent", ",", "sep", ",", "pre1", ",", "pre2", ")", ":", "# pylint: disable=R0914", "nname", "=", "name", "[", "lparent", "+", "1", ":", "]", "children", "=", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]", "ncmu", "=", "len", "(", "children", ")", "-", "1", "plst1", "=", "ncmu", "*", "[", "self", ".", "_vertical_and_right", "]", "+", "[", "self", ".", "_up_and_right", "]", "plst2", "=", "ncmu", "*", "[", "self", ".", "_vertical", "]", "+", "[", "\" \"", "]", "slist", "=", "(", "ncmu", "+", "1", ")", "*", "[", "sep", "+", "pre2", "]", "dmark", "=", "\" (*)\"", "if", "self", ".", "_db", "[", "name", "]", "[", "\"data\"", "]", "else", "\"\"", "return", "\"\\n\"", ".", "join", "(", "[", "u\"{sep}{connector}{name}{dmark}\"", ".", "format", "(", "sep", "=", "sep", ",", "connector", "=", "pre1", ",", "name", "=", "nname", ",", "dmark", "=", "dmark", ")", "]", "+", "[", "self", ".", "_prt", "(", "child", ",", "len", "(", "name", ")", ",", "sep", "=", "schar", ",", "pre1", "=", "p1", ",", "pre2", "=", "p2", ")", "for", "child", ",", "p1", ",", "p2", ",", "schar", "in", "zip", "(", "children", ",", "plst1", ",", "plst2", ",", "slist", ")", "]", ")" ]
Print a row (leaf) of tree. :param name: Full node name :type name: string :param lparent: Position in full node name of last separator before node to be printed :type lparent: integer :param pre1: Connector next to node name, either a null character if the node to print is the root node, a right angle if node name to be printed is a leaf or a rotated "T" if the node name to be printed is one of many children :type pre1: string
[ "Print", "a", "row", "(", "leaf", ")", "of", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L273-L308
pmacosta/ptrie
ptrie/ptrie.py
Trie._rename_node
def _rename_node(self, name, new_name): """ Rename node private method. No argument validation and usage of getter/setter private methods is used for speed """ # Update parent if not self.is_root(name): parent = self._db[name]["parent"] self._db[parent]["children"].remove(name) self._db[parent]["children"] = sorted( self._db[parent]["children"] + [new_name] ) # Update children iobj = self._get_subtree(name) if name != self.root_name else self.nodes for key in iobj: new_key = key.replace(name, new_name, 1) new_parent = ( self._db[key]["parent"] if key == name else self._db[key]["parent"].replace(name, new_name, 1) ) self._db[new_key] = { "parent": new_parent, "children": [ child.replace(name, new_name, 1) for child in self._db[key]["children"] ], "data": copy.deepcopy(self._db[key]["data"]), } del self._db[key] if name == self.root_name: self._root = new_name self._root_hierarchy_length = len( self.root_name.split(self._node_separator) )
python
def _rename_node(self, name, new_name): """ Rename node private method. No argument validation and usage of getter/setter private methods is used for speed """ # Update parent if not self.is_root(name): parent = self._db[name]["parent"] self._db[parent]["children"].remove(name) self._db[parent]["children"] = sorted( self._db[parent]["children"] + [new_name] ) # Update children iobj = self._get_subtree(name) if name != self.root_name else self.nodes for key in iobj: new_key = key.replace(name, new_name, 1) new_parent = ( self._db[key]["parent"] if key == name else self._db[key]["parent"].replace(name, new_name, 1) ) self._db[new_key] = { "parent": new_parent, "children": [ child.replace(name, new_name, 1) for child in self._db[key]["children"] ], "data": copy.deepcopy(self._db[key]["data"]), } del self._db[key] if name == self.root_name: self._root = new_name self._root_hierarchy_length = len( self.root_name.split(self._node_separator) )
[ "def", "_rename_node", "(", "self", ",", "name", ",", "new_name", ")", ":", "# Update parent", "if", "not", "self", ".", "is_root", "(", "name", ")", ":", "parent", "=", "self", ".", "_db", "[", "name", "]", "[", "\"parent\"", "]", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", ".", "remove", "(", "name", ")", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "=", "sorted", "(", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "+", "[", "new_name", "]", ")", "# Update children", "iobj", "=", "self", ".", "_get_subtree", "(", "name", ")", "if", "name", "!=", "self", ".", "root_name", "else", "self", ".", "nodes", "for", "key", "in", "iobj", ":", "new_key", "=", "key", ".", "replace", "(", "name", ",", "new_name", ",", "1", ")", "new_parent", "=", "(", "self", ".", "_db", "[", "key", "]", "[", "\"parent\"", "]", "if", "key", "==", "name", "else", "self", ".", "_db", "[", "key", "]", "[", "\"parent\"", "]", ".", "replace", "(", "name", ",", "new_name", ",", "1", ")", ")", "self", ".", "_db", "[", "new_key", "]", "=", "{", "\"parent\"", ":", "new_parent", ",", "\"children\"", ":", "[", "child", ".", "replace", "(", "name", ",", "new_name", ",", "1", ")", "for", "child", "in", "self", ".", "_db", "[", "key", "]", "[", "\"children\"", "]", "]", ",", "\"data\"", ":", "copy", ".", "deepcopy", "(", "self", ".", "_db", "[", "key", "]", "[", "\"data\"", "]", ")", ",", "}", "del", "self", ".", "_db", "[", "key", "]", "if", "name", "==", "self", ".", "root_name", ":", "self", ".", "_root", "=", "new_name", "self", ".", "_root_hierarchy_length", "=", "len", "(", "self", ".", "root_name", ".", "split", "(", "self", ".", "_node_separator", ")", ")" ]
Rename node private method. No argument validation and usage of getter/setter private methods is used for speed
[ "Rename", "node", "private", "method", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L310-L346
pmacosta/ptrie
ptrie/ptrie.py
Trie._search_tree
def _search_tree(self, name): """Search_tree for nodes that contain a specific hierarchy name.""" tpl1 = "{sep}{name}{sep}".format(sep=self._node_separator, name=name) tpl2 = "{sep}{name}".format(sep=self._node_separator, name=name) tpl3 = "{name}{sep}".format(sep=self._node_separator, name=name) return sorted( [ node for node in self._db if (tpl1 in node) or node.endswith(tpl2) or node.startswith(tpl3) or (name == node) ] )
python
def _search_tree(self, name): """Search_tree for nodes that contain a specific hierarchy name.""" tpl1 = "{sep}{name}{sep}".format(sep=self._node_separator, name=name) tpl2 = "{sep}{name}".format(sep=self._node_separator, name=name) tpl3 = "{name}{sep}".format(sep=self._node_separator, name=name) return sorted( [ node for node in self._db if (tpl1 in node) or node.endswith(tpl2) or node.startswith(tpl3) or (name == node) ] )
[ "def", "_search_tree", "(", "self", ",", "name", ")", ":", "tpl1", "=", "\"{sep}{name}{sep}\"", ".", "format", "(", "sep", "=", "self", ".", "_node_separator", ",", "name", "=", "name", ")", "tpl2", "=", "\"{sep}{name}\"", ".", "format", "(", "sep", "=", "self", ".", "_node_separator", ",", "name", "=", "name", ")", "tpl3", "=", "\"{name}{sep}\"", ".", "format", "(", "sep", "=", "self", ".", "_node_separator", ",", "name", "=", "name", ")", "return", "sorted", "(", "[", "node", "for", "node", "in", "self", ".", "_db", "if", "(", "tpl1", "in", "node", ")", "or", "node", ".", "endswith", "(", "tpl2", ")", "or", "node", ".", "startswith", "(", "tpl3", ")", "or", "(", "name", "==", "node", ")", "]", ")" ]
Search_tree for nodes that contain a specific hierarchy name.
[ "Search_tree", "for", "nodes", "that", "contain", "a", "specific", "hierarchy", "name", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L348-L362
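A standalone illustration of the matching rule in _search_tree above: a hierarchy name matches a node only when it appears as a complete level, bounded by separators or by the ends of the node name. The names are invented and, unlike the method, this sketch does not sort the result:

sep = '.'
name = 'leaf1'
nodes = ['root.branch1.leaf1', 'root.branch1.leaf10', 'root.leaf1.sub', 'leaf1']

def matches(node):
    return (
        (sep + name + sep) in node
        or node.endswith(sep + name)
        or node.startswith(name + sep)
        or node == name
    )

print([node for node in nodes if matches(node)])
# ['root.branch1.leaf1', 'root.leaf1.sub', 'leaf1']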
pmacosta/ptrie
ptrie/ptrie.py
Trie._validate_node_name
def _validate_node_name(self, var_value): """Validate NodeName pseudo-type.""" # pylint: disable=R0201 var_values = var_value if isinstance(var_value, list) else [var_value] for item in var_values: if (not isinstance(item, str)) or ( isinstance(item, str) and ( (" " in item) or any( [ element.strip() == "" for element in item.strip().split(self._node_separator) ] ) ) ): return True return False
python
def _validate_node_name(self, var_value): """Validate NodeName pseudo-type.""" # pylint: disable=R0201 var_values = var_value if isinstance(var_value, list) else [var_value] for item in var_values: if (not isinstance(item, str)) or ( isinstance(item, str) and ( (" " in item) or any( [ element.strip() == "" for element in item.strip().split(self._node_separator) ] ) ) ): return True return False
[ "def", "_validate_node_name", "(", "self", ",", "var_value", ")", ":", "# pylint: disable=R0201", "var_values", "=", "var_value", "if", "isinstance", "(", "var_value", ",", "list", ")", "else", "[", "var_value", "]", "for", "item", "in", "var_values", ":", "if", "(", "not", "isinstance", "(", "item", ",", "str", ")", ")", "or", "(", "isinstance", "(", "item", ",", "str", ")", "and", "(", "(", "\" \"", "in", "item", ")", "or", "any", "(", "[", "element", ".", "strip", "(", ")", "==", "\"\"", "for", "element", "in", "item", ".", "strip", "(", ")", ".", "split", "(", "self", ".", "_node_separator", ")", "]", ")", ")", ")", ":", "return", "True", "return", "False" ]
Validate NodeName pseudo-type.
[ "Validate", "NodeName", "pseudo", "-", "type", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L381-L399
pmacosta/ptrie
ptrie/ptrie.py
Trie._validate_nodes_with_data
def _validate_nodes_with_data(self, names): """Validate NodeWithData pseudo-type.""" names = names if isinstance(names, list) else [names] if not names: raise RuntimeError("Argument `nodes` is not valid") for ndict in names: if (not isinstance(ndict, dict)) or ( isinstance(ndict, dict) and (set(ndict.keys()) != set(["name", "data"])) ): raise RuntimeError("Argument `nodes` is not valid") name = ndict["name"] if (not isinstance(name, str)) or ( isinstance(name, str) and ( (" " in name) or any( [ element.strip() == "" for element in name.strip().split(self._node_separator) ] ) ) ): raise RuntimeError("Argument `nodes` is not valid")
python
def _validate_nodes_with_data(self, names): """Validate NodeWithData pseudo-type.""" names = names if isinstance(names, list) else [names] if not names: raise RuntimeError("Argument `nodes` is not valid") for ndict in names: if (not isinstance(ndict, dict)) or ( isinstance(ndict, dict) and (set(ndict.keys()) != set(["name", "data"])) ): raise RuntimeError("Argument `nodes` is not valid") name = ndict["name"] if (not isinstance(name, str)) or ( isinstance(name, str) and ( (" " in name) or any( [ element.strip() == "" for element in name.strip().split(self._node_separator) ] ) ) ): raise RuntimeError("Argument `nodes` is not valid")
[ "def", "_validate_nodes_with_data", "(", "self", ",", "names", ")", ":", "names", "=", "names", "if", "isinstance", "(", "names", ",", "list", ")", "else", "[", "names", "]", "if", "not", "names", ":", "raise", "RuntimeError", "(", "\"Argument `nodes` is not valid\"", ")", "for", "ndict", "in", "names", ":", "if", "(", "not", "isinstance", "(", "ndict", ",", "dict", ")", ")", "or", "(", "isinstance", "(", "ndict", ",", "dict", ")", "and", "(", "set", "(", "ndict", ".", "keys", "(", ")", ")", "!=", "set", "(", "[", "\"name\"", ",", "\"data\"", "]", ")", ")", ")", ":", "raise", "RuntimeError", "(", "\"Argument `nodes` is not valid\"", ")", "name", "=", "ndict", "[", "\"name\"", "]", "if", "(", "not", "isinstance", "(", "name", ",", "str", ")", ")", "or", "(", "isinstance", "(", "name", ",", "str", ")", "and", "(", "(", "\" \"", "in", "name", ")", "or", "any", "(", "[", "element", ".", "strip", "(", ")", "==", "\"\"", "for", "element", "in", "name", ".", "strip", "(", ")", ".", "split", "(", "self", ".", "_node_separator", ")", "]", ")", ")", ")", ":", "raise", "RuntimeError", "(", "\"Argument `nodes` is not valid\"", ")" ]
Validate NodeWithData pseudo-type.
[ "Validate", "NodeWithData", "pseudo", "-", "type", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L401-L424
pmacosta/ptrie
ptrie/ptrie.py
Trie.add_nodes
def add_nodes(self, nodes): # noqa: D302 r""" Add nodes to tree. :param nodes: Node(s) to add with associated data. If there are several list items in the argument with the same node name the resulting node data is a list with items corresponding to the data of each entry in the argument with the same node name, in their order of appearance, in addition to any existing node data if the node is already present in the tree :type nodes: :ref:`NodesWithData` :raises: * RuntimeError (Argument \`nodes\` is not valid) * ValueError (Illegal node name: *[node_name]*) For example: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('ptrie_example.py', cog.out) .. =]= .. code-block:: python # ptrie_example.py import ptrie def create_tree(): tobj = ptrie.Trie() tobj.add_nodes([ {'name':'root.branch1', 'data':5}, {'name':'root.branch1', 'data':7}, {'name':'root.branch2', 'data':[]}, {'name':'root.branch1.leaf1', 'data':[]}, {'name':'root.branch1.leaf1.subleaf1', 'data':333}, {'name':'root.branch1.leaf2', 'data':'Hello world!'}, {'name':'root.branch1.leaf2.subleaf2', 'data':[]}, ]) return tobj .. =[=end=]= .. code-block:: python >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.get_data('root.branch1') [5, 7] """ self._validate_nodes_with_data(nodes) nodes = nodes if isinstance(nodes, list) else [nodes] # Create root node (if needed) if not self.root_name: self._set_root_name(nodes[0]["name"].split(self._node_separator)[0].strip()) self._root_hierarchy_length = len( self.root_name.split(self._node_separator) ) self._create_node(name=self.root_name, parent="", children=[], data=[]) # Process new data for node_dict in nodes: name, data = node_dict["name"], node_dict["data"] if name not in self._db: # Validate node name (root of new node same as tree root) if not name.startswith(self.root_name + self._node_separator): raise ValueError("Illegal node name: {0}".format(name)) self._create_intermediate_nodes(name) self._db[name]["data"] += copy.deepcopy( data if isinstance(data, list) and data else ([] if isinstance(data, list) else [data]) )
python
def add_nodes(self, nodes): # noqa: D302 r""" Add nodes to tree. :param nodes: Node(s) to add with associated data. If there are several list items in the argument with the same node name the resulting node data is a list with items corresponding to the data of each entry in the argument with the same node name, in their order of appearance, in addition to any existing node data if the node is already present in the tree :type nodes: :ref:`NodesWithData` :raises: * RuntimeError (Argument \`nodes\` is not valid) * ValueError (Illegal node name: *[node_name]*) For example: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('ptrie_example.py', cog.out) .. =]= .. code-block:: python # ptrie_example.py import ptrie def create_tree(): tobj = ptrie.Trie() tobj.add_nodes([ {'name':'root.branch1', 'data':5}, {'name':'root.branch1', 'data':7}, {'name':'root.branch2', 'data':[]}, {'name':'root.branch1.leaf1', 'data':[]}, {'name':'root.branch1.leaf1.subleaf1', 'data':333}, {'name':'root.branch1.leaf2', 'data':'Hello world!'}, {'name':'root.branch1.leaf2.subleaf2', 'data':[]}, ]) return tobj .. =[=end=]= .. code-block:: python >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.get_data('root.branch1') [5, 7] """ self._validate_nodes_with_data(nodes) nodes = nodes if isinstance(nodes, list) else [nodes] # Create root node (if needed) if not self.root_name: self._set_root_name(nodes[0]["name"].split(self._node_separator)[0].strip()) self._root_hierarchy_length = len( self.root_name.split(self._node_separator) ) self._create_node(name=self.root_name, parent="", children=[], data=[]) # Process new data for node_dict in nodes: name, data = node_dict["name"], node_dict["data"] if name not in self._db: # Validate node name (root of new node same as tree root) if not name.startswith(self.root_name + self._node_separator): raise ValueError("Illegal node name: {0}".format(name)) self._create_intermediate_nodes(name) self._db[name]["data"] += copy.deepcopy( data if isinstance(data, list) and data else ([] if isinstance(data, list) else [data]) )
[ "def", "add_nodes", "(", "self", ",", "nodes", ")", ":", "# noqa: D302", "self", ".", "_validate_nodes_with_data", "(", "nodes", ")", "nodes", "=", "nodes", "if", "isinstance", "(", "nodes", ",", "list", ")", "else", "[", "nodes", "]", "# Create root node (if needed)", "if", "not", "self", ".", "root_name", ":", "self", ".", "_set_root_name", "(", "nodes", "[", "0", "]", "[", "\"name\"", "]", ".", "split", "(", "self", ".", "_node_separator", ")", "[", "0", "]", ".", "strip", "(", ")", ")", "self", ".", "_root_hierarchy_length", "=", "len", "(", "self", ".", "root_name", ".", "split", "(", "self", ".", "_node_separator", ")", ")", "self", ".", "_create_node", "(", "name", "=", "self", ".", "root_name", ",", "parent", "=", "\"\"", ",", "children", "=", "[", "]", ",", "data", "=", "[", "]", ")", "# Process new data", "for", "node_dict", "in", "nodes", ":", "name", ",", "data", "=", "node_dict", "[", "\"name\"", "]", ",", "node_dict", "[", "\"data\"", "]", "if", "name", "not", "in", "self", ".", "_db", ":", "# Validate node name (root of new node same as tree root)", "if", "not", "name", ".", "startswith", "(", "self", ".", "root_name", "+", "self", ".", "_node_separator", ")", ":", "raise", "ValueError", "(", "\"Illegal node name: {0}\"", ".", "format", "(", "name", ")", ")", "self", ".", "_create_intermediate_nodes", "(", "name", ")", "self", ".", "_db", "[", "name", "]", "[", "\"data\"", "]", "+=", "copy", ".", "deepcopy", "(", "data", "if", "isinstance", "(", "data", ",", "list", ")", "and", "data", "else", "(", "[", "]", "if", "isinstance", "(", "data", ",", "list", ")", "else", "[", "data", "]", ")", ")" ]
r""" Add nodes to tree. :param nodes: Node(s) to add with associated data. If there are several list items in the argument with the same node name the resulting node data is a list with items corresponding to the data of each entry in the argument with the same node name, in their order of appearance, in addition to any existing node data if the node is already present in the tree :type nodes: :ref:`NodesWithData` :raises: * RuntimeError (Argument \`nodes\` is not valid) * ValueError (Illegal node name: *[node_name]*) For example: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('ptrie_example.py', cog.out) .. =]= .. code-block:: python # ptrie_example.py import ptrie def create_tree(): tobj = ptrie.Trie() tobj.add_nodes([ {'name':'root.branch1', 'data':5}, {'name':'root.branch1', 'data':7}, {'name':'root.branch2', 'data':[]}, {'name':'root.branch1.leaf1', 'data':[]}, {'name':'root.branch1.leaf1.subleaf1', 'data':333}, {'name':'root.branch1.leaf2', 'data':'Hello world!'}, {'name':'root.branch1.leaf2.subleaf2', 'data':[]}, ]) return tobj .. =[=end=]= .. code-block:: python >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.get_data('root.branch1') [5, 7]
[ "r", "Add", "nodes", "to", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L426-L508
pmacosta/ptrie
ptrie/ptrie.py
Trie.collapse_subtree
def collapse_subtree(self, name, recursive=True): # noqa: D302 r""" Collapse a sub-tree. Nodes that have a single child and no data are combined with their child as a single tree node :param name: Root of the sub-tree to collapse :type name: :ref:`NodeName` :param recursive: Flag that indicates whether the collapse operation is performed on the whole sub-tree (True) or whether it stops upon reaching the first node where the collapsing condition is not satisfied (False) :type recursive: boolean :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Argument \`recursive\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.collapse_subtree('root.branch1') >>> print(tobj) root ├branch1 (*) │├leaf1.subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 ``root.branch1.leaf1`` is collapsed because it only has one child (``root.branch1.leaf1.subleaf1``) and no data; ``root.branch1.leaf2`` is not collapsed because although it has one child (``root.branch1.leaf2.subleaf2``) and this child does have data associated with it, :code:`'Hello world!'` """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") if not isinstance(recursive, bool): raise RuntimeError("Argument `recursive` is not valid") self._node_in_tree(name) self._collapse_subtree(name, recursive)
python
def collapse_subtree(self, name, recursive=True): # noqa: D302 r""" Collapse a sub-tree. Nodes that have a single child and no data are combined with their child as a single tree node :param name: Root of the sub-tree to collapse :type name: :ref:`NodeName` :param recursive: Flag that indicates whether the collapse operation is performed on the whole sub-tree (True) or whether it stops upon reaching the first node where the collapsing condition is not satisfied (False) :type recursive: boolean :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Argument \`recursive\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.collapse_subtree('root.branch1') >>> print(tobj) root ├branch1 (*) │├leaf1.subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 ``root.branch1.leaf1`` is collapsed because it only has one child (``root.branch1.leaf1.subleaf1``) and no data; ``root.branch1.leaf2`` is not collapsed because although it has one child (``root.branch1.leaf2.subleaf2``) and this child does have data associated with it, :code:`'Hello world!'` """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") if not isinstance(recursive, bool): raise RuntimeError("Argument `recursive` is not valid") self._node_in_tree(name) self._collapse_subtree(name, recursive)
[ "def", "collapse_subtree", "(", "self", ",", "name", ",", "recursive", "=", "True", ")", ":", "# noqa: D302", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "if", "not", "isinstance", "(", "recursive", ",", "bool", ")", ":", "raise", "RuntimeError", "(", "\"Argument `recursive` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "self", ".", "_collapse_subtree", "(", "name", ",", "recursive", ")" ]
r""" Collapse a sub-tree. Nodes that have a single child and no data are combined with their child as a single tree node :param name: Root of the sub-tree to collapse :type name: :ref:`NodeName` :param recursive: Flag that indicates whether the collapse operation is performed on the whole sub-tree (True) or whether it stops upon reaching the first node where the collapsing condition is not satisfied (False) :type recursive: boolean :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Argument \`recursive\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.collapse_subtree('root.branch1') >>> print(tobj) root ├branch1 (*) │├leaf1.subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 ``root.branch1.leaf1`` is collapsed because it only has one child (``root.branch1.leaf1.subleaf1``) and no data; ``root.branch1.leaf2`` is not collapsed because although it has one child (``root.branch1.leaf2.subleaf2``) and this child does have data associated with it, :code:`'Hello world!'`
[ "r", "Collapse", "a", "sub", "-", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L510-L567
pmacosta/ptrie
ptrie/ptrie.py
Trie.copy_subtree
def copy_subtree(self, source_node, dest_node): # noqa: D302 r""" Copy a sub-tree from one sub-node to another. Data is added if some nodes of the source sub-tree exist in the destination sub-tree :param source_name: Root node of the sub-tree to copy from :type source_name: :ref:`NodeName` :param dest_name: Root node of the sub-tree to copy to :type dest_name: :ref:`NodeName` :raises: * RuntimeError (Argument \`dest_node\` is not valid) * RuntimeError (Argument \`source_node\` is not valid) * RuntimeError (Illegal root in destination node) * RuntimeError (Node *[source_node]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.copy_subtree('root.branch1', 'root.branch3') >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 ├branch2 └branch3 (*) ├leaf1 │└subleaf1 (*) └leaf2 (*) └subleaf2 """ if self._validate_node_name(source_node): raise RuntimeError("Argument `source_node` is not valid") if self._validate_node_name(dest_node): raise RuntimeError("Argument `dest_node` is not valid") if source_node not in self._db: raise RuntimeError("Node {0} not in tree".format(source_node)) if not dest_node.startswith(self.root_name + self._node_separator): raise RuntimeError("Illegal root in destination node") for node in self._get_subtree(source_node): self._db[node.replace(source_node, dest_node, 1)] = { "parent": self._db[node]["parent"].replace(source_node, dest_node, 1), "children": [ child.replace(source_node, dest_node, 1) for child in self._db[node]["children"] ], "data": copy.deepcopy(self._db[node]["data"]), } self._create_intermediate_nodes(dest_node) parent = self._node_separator.join(dest_node.split(self._node_separator)[:-1]) self._db[dest_node]["parent"] = parent self._db[parent]["children"] = sorted( self._db[parent]["children"] + [dest_node] )
python
def copy_subtree(self, source_node, dest_node): # noqa: D302 r""" Copy a sub-tree from one sub-node to another. Data is added if some nodes of the source sub-tree exist in the destination sub-tree :param source_name: Root node of the sub-tree to copy from :type source_name: :ref:`NodeName` :param dest_name: Root node of the sub-tree to copy to :type dest_name: :ref:`NodeName` :raises: * RuntimeError (Argument \`dest_node\` is not valid) * RuntimeError (Argument \`source_node\` is not valid) * RuntimeError (Illegal root in destination node) * RuntimeError (Node *[source_node]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.copy_subtree('root.branch1', 'root.branch3') >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 ├branch2 └branch3 (*) ├leaf1 │└subleaf1 (*) └leaf2 (*) └subleaf2 """ if self._validate_node_name(source_node): raise RuntimeError("Argument `source_node` is not valid") if self._validate_node_name(dest_node): raise RuntimeError("Argument `dest_node` is not valid") if source_node not in self._db: raise RuntimeError("Node {0} not in tree".format(source_node)) if not dest_node.startswith(self.root_name + self._node_separator): raise RuntimeError("Illegal root in destination node") for node in self._get_subtree(source_node): self._db[node.replace(source_node, dest_node, 1)] = { "parent": self._db[node]["parent"].replace(source_node, dest_node, 1), "children": [ child.replace(source_node, dest_node, 1) for child in self._db[node]["children"] ], "data": copy.deepcopy(self._db[node]["data"]), } self._create_intermediate_nodes(dest_node) parent = self._node_separator.join(dest_node.split(self._node_separator)[:-1]) self._db[dest_node]["parent"] = parent self._db[parent]["children"] = sorted( self._db[parent]["children"] + [dest_node] )
[ "def", "copy_subtree", "(", "self", ",", "source_node", ",", "dest_node", ")", ":", "# noqa: D302", "if", "self", ".", "_validate_node_name", "(", "source_node", ")", ":", "raise", "RuntimeError", "(", "\"Argument `source_node` is not valid\"", ")", "if", "self", ".", "_validate_node_name", "(", "dest_node", ")", ":", "raise", "RuntimeError", "(", "\"Argument `dest_node` is not valid\"", ")", "if", "source_node", "not", "in", "self", ".", "_db", ":", "raise", "RuntimeError", "(", "\"Node {0} not in tree\"", ".", "format", "(", "source_node", ")", ")", "if", "not", "dest_node", ".", "startswith", "(", "self", ".", "root_name", "+", "self", ".", "_node_separator", ")", ":", "raise", "RuntimeError", "(", "\"Illegal root in destination node\"", ")", "for", "node", "in", "self", ".", "_get_subtree", "(", "source_node", ")", ":", "self", ".", "_db", "[", "node", ".", "replace", "(", "source_node", ",", "dest_node", ",", "1", ")", "]", "=", "{", "\"parent\"", ":", "self", ".", "_db", "[", "node", "]", "[", "\"parent\"", "]", ".", "replace", "(", "source_node", ",", "dest_node", ",", "1", ")", ",", "\"children\"", ":", "[", "child", ".", "replace", "(", "source_node", ",", "dest_node", ",", "1", ")", "for", "child", "in", "self", ".", "_db", "[", "node", "]", "[", "\"children\"", "]", "]", ",", "\"data\"", ":", "copy", ".", "deepcopy", "(", "self", ".", "_db", "[", "node", "]", "[", "\"data\"", "]", ")", ",", "}", "self", ".", "_create_intermediate_nodes", "(", "dest_node", ")", "parent", "=", "self", ".", "_node_separator", ".", "join", "(", "dest_node", ".", "split", "(", "self", ".", "_node_separator", ")", "[", ":", "-", "1", "]", ")", "self", ".", "_db", "[", "dest_node", "]", "[", "\"parent\"", "]", "=", "parent", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "=", "sorted", "(", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "+", "[", "dest_node", "]", ")" ]
r""" Copy a sub-tree from one sub-node to another. Data is added if some nodes of the source sub-tree exist in the destination sub-tree :param source_name: Root node of the sub-tree to copy from :type source_name: :ref:`NodeName` :param dest_name: Root node of the sub-tree to copy to :type dest_name: :ref:`NodeName` :raises: * RuntimeError (Argument \`dest_node\` is not valid) * RuntimeError (Argument \`source_node\` is not valid) * RuntimeError (Illegal root in destination node) * RuntimeError (Node *[source_node]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.copy_subtree('root.branch1', 'root.branch3') >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 ├branch2 └branch3 (*) ├leaf1 │└subleaf1 (*) └leaf2 (*) └subleaf2
[ "r", "Copy", "a", "sub", "-", "tree", "from", "one", "sub", "-", "node", "to", "another", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L569-L642
pmacosta/ptrie
ptrie/ptrie.py
Trie.delete_prefix
def delete_prefix(self, name): # noqa: D302 r""" Delete hierarchy levels from all nodes in the tree. :param nodes: Prefix to delete :type nodes: :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not a valid prefix) * RuntimeError (Argument \`name\` is not valid) For example: >>> from __future__ import print_function >>> import ptrie >>> tobj = ptrie.Trie('/') >>> tobj.add_nodes([ ... {'name':'hello/world/root', 'data':[]}, ... {'name':'hello/world/root/anode', 'data':7}, ... {'name':'hello/world/root/bnode', 'data':8}, ... {'name':'hello/world/root/cnode', 'data':False}, ... {'name':'hello/world/root/bnode/anode', 'data':['a', 'b']}, ... {'name':'hello/world/root/cnode/anode/leaf', 'data':True} ... ]) >>> tobj.collapse_subtree('hello', recursive=False) >>> print(tobj) hello/world/root ├anode (*) ├bnode (*) │└anode (*) └cnode (*) └anode └leaf (*) >>> tobj.delete_prefix('hello/world') >>> print(tobj) root ├anode (*) ├bnode (*) │└anode (*) └cnode (*) └anode └leaf (*) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") if (not self.root_name.startswith(name)) or (self.root_name == name): raise RuntimeError("Argument `name` is not a valid prefix") self._delete_prefix(name)
python
def delete_prefix(self, name): # noqa: D302
    r"""
    Delete hierarchy levels from all nodes in the tree.

    :param name: Prefix to delete
    :type name: :ref:`NodeName`

    :raises:
     * RuntimeError (Argument \`name\` is not a valid prefix)

     * RuntimeError (Argument \`name\` is not valid)

    For example:

        >>> from __future__ import print_function
        >>> import ptrie
        >>> tobj = ptrie.Trie('/')
        >>> tobj.add_nodes([
        ...     {'name':'hello/world/root', 'data':[]},
        ...     {'name':'hello/world/root/anode', 'data':7},
        ...     {'name':'hello/world/root/bnode', 'data':8},
        ...     {'name':'hello/world/root/cnode', 'data':False},
        ...     {'name':'hello/world/root/bnode/anode', 'data':['a', 'b']},
        ...     {'name':'hello/world/root/cnode/anode/leaf', 'data':True}
        ... ])
        >>> tobj.collapse_subtree('hello', recursive=False)
        >>> print(tobj)
        hello/world/root
        ├anode (*)
        ├bnode (*)
        │└anode (*)
        └cnode (*)
         └anode
          └leaf (*)
        >>> tobj.delete_prefix('hello/world')
        >>> print(tobj)
        root
        ├anode (*)
        ├bnode (*)
        │└anode (*)
        └cnode (*)
         └anode
          └leaf (*)
    """
    if self._validate_node_name(name):
        raise RuntimeError("Argument `name` is not valid")
    if (not self.root_name.startswith(name)) or (self.root_name == name):
        raise RuntimeError("Argument `name` is not a valid prefix")
    self._delete_prefix(name)
[ "def", "delete_prefix", "(", "self", ",", "name", ")", ":", "# noqa: D302", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "if", "(", "not", "self", ".", "root_name", ".", "startswith", "(", "name", ")", ")", "or", "(", "self", ".", "root_name", "==", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not a valid prefix\"", ")", "self", ".", "_delete_prefix", "(", "name", ")" ]
r""" Delete hierarchy levels from all nodes in the tree. :param nodes: Prefix to delete :type nodes: :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not a valid prefix) * RuntimeError (Argument \`name\` is not valid) For example: >>> from __future__ import print_function >>> import ptrie >>> tobj = ptrie.Trie('/') >>> tobj.add_nodes([ ... {'name':'hello/world/root', 'data':[]}, ... {'name':'hello/world/root/anode', 'data':7}, ... {'name':'hello/world/root/bnode', 'data':8}, ... {'name':'hello/world/root/cnode', 'data':False}, ... {'name':'hello/world/root/bnode/anode', 'data':['a', 'b']}, ... {'name':'hello/world/root/cnode/anode/leaf', 'data':True} ... ]) >>> tobj.collapse_subtree('hello', recursive=False) >>> print(tobj) hello/world/root ├anode (*) ├bnode (*) │└anode (*) └cnode (*) └anode └leaf (*) >>> tobj.delete_prefix('hello/world') >>> print(tobj) root ├anode (*) ├bnode (*) │└anode (*) └cnode (*) └anode └leaf (*)
[ "r", "Delete", "hierarchy", "levels", "from", "all", "nodes", "in", "the", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L644-L692
pmacosta/ptrie
ptrie/ptrie.py
Trie.delete_subtree
def delete_subtree(self, nodes): # noqa: D302 r""" Delete nodes (and their sub-trees) from the tree. :param nodes: Node(s) to delete :type nodes: :ref:`NodeName` or list of :ref:`NodeName` :raises: * RuntimeError (Argument \`nodes\` is not valid) * RuntimeError (Node *[node_name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2']) >>> print(tobj) root └branch1 (*) └leaf2 (*) └subleaf2 """ if self._validate_node_name(nodes): raise RuntimeError("Argument `nodes` is not valid") self._delete_subtree(nodes)
python
def delete_subtree(self, nodes): # noqa: D302 r""" Delete nodes (and their sub-trees) from the tree. :param nodes: Node(s) to delete :type nodes: :ref:`NodeName` or list of :ref:`NodeName` :raises: * RuntimeError (Argument \`nodes\` is not valid) * RuntimeError (Node *[node_name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2']) >>> print(tobj) root └branch1 (*) └leaf2 (*) └subleaf2 """ if self._validate_node_name(nodes): raise RuntimeError("Argument `nodes` is not valid") self._delete_subtree(nodes)
[ "def", "delete_subtree", "(", "self", ",", "nodes", ")", ":", "# noqa: D302", "if", "self", ".", "_validate_node_name", "(", "nodes", ")", ":", "raise", "RuntimeError", "(", "\"Argument `nodes` is not valid\"", ")", "self", ".", "_delete_subtree", "(", "nodes", ")" ]
r""" Delete nodes (and their sub-trees) from the tree. :param nodes: Node(s) to delete :type nodes: :ref:`NodeName` or list of :ref:`NodeName` :raises: * RuntimeError (Argument \`nodes\` is not valid) * RuntimeError (Node *[node_name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2']) >>> print(tobj) root └branch1 (*) └leaf2 (*) └subleaf2
[ "r", "Delete", "nodes", "(", "and", "their", "sub", "-", "trees", ")", "from", "the", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L694-L729
pmacosta/ptrie
ptrie/ptrie.py
Trie.flatten_subtree
def flatten_subtree(self, name): # noqa: D302 r""" Flatten sub-tree. Nodes that have children and no data are merged with each child :param name: Ending hierarchy node whose sub-trees are to be flattened :type name: :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> tobj.add_nodes([ ... {'name':'root.branch1.leaf1.subleaf2', 'data':[]}, ... {'name':'root.branch2.leaf1', 'data':'loren ipsum'}, ... {'name':'root.branch2.leaf1.another_subleaf1', 'data':[]}, ... {'name':'root.branch2.leaf1.another_subleaf2', 'data':[]} ... ]) >>> print(str(tobj)) root ├branch1 (*) │├leaf1 ││├subleaf1 (*) ││└subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 >>> tobj.flatten_subtree('root.branch1.leaf1') >>> print(str(tobj)) root ├branch1 (*) │├leaf1.subleaf1 (*) │├leaf1.subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 >>> tobj.flatten_subtree('root.branch2.leaf1') >>> print(str(tobj)) root ├branch1 (*) │├leaf1.subleaf1 (*) │├leaf1.subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) parent = self._db[name]["parent"] if (parent) and (not self._db[name]["data"]): children = self._db[name]["children"] for child in children: self._db[child]["parent"] = parent self._db[parent]["children"].remove(name) self._db[parent]["children"] = sorted( self._db[parent]["children"] + children ) del self._db[name]
python
def flatten_subtree(self, name): # noqa: D302 r""" Flatten sub-tree. Nodes that have children and no data are merged with each child :param name: Ending hierarchy node whose sub-trees are to be flattened :type name: :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> tobj.add_nodes([ ... {'name':'root.branch1.leaf1.subleaf2', 'data':[]}, ... {'name':'root.branch2.leaf1', 'data':'loren ipsum'}, ... {'name':'root.branch2.leaf1.another_subleaf1', 'data':[]}, ... {'name':'root.branch2.leaf1.another_subleaf2', 'data':[]} ... ]) >>> print(str(tobj)) root ├branch1 (*) │├leaf1 ││├subleaf1 (*) ││└subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 >>> tobj.flatten_subtree('root.branch1.leaf1') >>> print(str(tobj)) root ├branch1 (*) │├leaf1.subleaf1 (*) │├leaf1.subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 >>> tobj.flatten_subtree('root.branch2.leaf1') >>> print(str(tobj)) root ├branch1 (*) │├leaf1.subleaf1 (*) │├leaf1.subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) parent = self._db[name]["parent"] if (parent) and (not self._db[name]["data"]): children = self._db[name]["children"] for child in children: self._db[child]["parent"] = parent self._db[parent]["children"].remove(name) self._db[parent]["children"] = sorted( self._db[parent]["children"] + children ) del self._db[name]
[ "def", "flatten_subtree", "(", "self", ",", "name", ")", ":", "# noqa: D302", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "parent", "=", "self", ".", "_db", "[", "name", "]", "[", "\"parent\"", "]", "if", "(", "parent", ")", "and", "(", "not", "self", ".", "_db", "[", "name", "]", "[", "\"data\"", "]", ")", ":", "children", "=", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]", "for", "child", "in", "children", ":", "self", ".", "_db", "[", "child", "]", "[", "\"parent\"", "]", "=", "parent", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", ".", "remove", "(", "name", ")", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "=", "sorted", "(", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "+", "children", ")", "del", "self", ".", "_db", "[", "name", "]" ]
r""" Flatten sub-tree. Nodes that have children and no data are merged with each child :param name: Ending hierarchy node whose sub-trees are to be flattened :type name: :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example >>> tobj = docs.support.ptrie_example.create_tree() >>> tobj.add_nodes([ ... {'name':'root.branch1.leaf1.subleaf2', 'data':[]}, ... {'name':'root.branch2.leaf1', 'data':'loren ipsum'}, ... {'name':'root.branch2.leaf1.another_subleaf1', 'data':[]}, ... {'name':'root.branch2.leaf1.another_subleaf2', 'data':[]} ... ]) >>> print(str(tobj)) root ├branch1 (*) │├leaf1 ││├subleaf1 (*) ││└subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 >>> tobj.flatten_subtree('root.branch1.leaf1') >>> print(str(tobj)) root ├branch1 (*) │├leaf1.subleaf1 (*) │├leaf1.subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2 >>> tobj.flatten_subtree('root.branch2.leaf1') >>> print(str(tobj)) root ├branch1 (*) │├leaf1.subleaf1 (*) │├leaf1.subleaf2 │└leaf2 (*) │ └subleaf2 └branch2 └leaf1 (*) ├another_subleaf1 └another_subleaf2
[ "r", "Flatten", "sub", "-", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L731-L807
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_children
def get_children(self, name): r""" Get the children node names of a node. :param name: Parent node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return sorted(self._db[name]["children"])
python
def get_children(self, name): r""" Get the children node names of a node. :param name: Parent node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return sorted(self._db[name]["children"])
[ "def", "get_children", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "sorted", "(", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]", ")" ]
r""" Get the children node names of a node. :param name: Parent node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Get", "the", "children", "node", "names", "of", "a", "node", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L809-L826
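A minimal usage sketch for Trie.get_children; the '.' separator argument and the parent-before-child add_nodes ordering are assumptions carried over from the delete_prefix example quoted earlier in this file:

# Build a small tree and list the children of the root node.
import ptrie

tobj = ptrie.Trie('.')  # separator value is an assumption; the delete_prefix example passes '/'
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.branch1', 'data': 5},
    {'name': 'root.branch2', 'data': []},
])
print(tobj.get_children('root'))  # expected: ['root.branch1', 'root.branch2'] (sorted full node names)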
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_data
def get_data(self, name): r""" Get the data associated with a node. :param name: Node name :type name: :ref:`NodeName` :rtype: any type or list of objects of any type :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._db[name]["data"]
python
def get_data(self, name): r""" Get the data associated with a node. :param name: Node name :type name: :ref:`NodeName` :rtype: any type or list of objects of any type :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._db[name]["data"]
[ "def", "get_data", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "self", ".", "_db", "[", "name", "]", "[", "\"data\"", "]" ]
r""" Get the data associated with a node. :param name: Node name :type name: :ref:`NodeName` :rtype: any type or list of objects of any type :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Get", "the", "data", "associated", "with", "a", "node", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L828-L845
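A short sketch for Trie.get_data under the same assumptions (a '.' separator and the add_nodes payload shape shown above); the exact return shape is hedged in the comment because get_node's docstring describes node data as a list:

# Attach data to a node and read it back.
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.leaf1', 'data': 7},
])
print(tobj.get_data('root.leaf1'))  # likely [7], since node data is kept as a list per get_node's docstring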
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_leafs
def get_leafs(self, name): r""" Get the sub-tree leaf node(s). :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return [node for node in self._get_subtree(name) if self.is_leaf(node)]
python
def get_leafs(self, name): r""" Get the sub-tree leaf node(s). :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return [node for node in self._get_subtree(name) if self.is_leaf(node)]
[ "def", "get_leafs", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "[", "node", "for", "node", "in", "self", ".", "_get_subtree", "(", "name", ")", "if", "self", ".", "is_leaf", "(", "node", ")", "]" ]
r""" Get the sub-tree leaf node(s). :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Get", "the", "sub", "-", "tree", "leaf", "node", "(", "s", ")", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L847-L864
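A hedged sketch for Trie.get_leafs; the tree shape and separator are illustrative assumptions, and the parent nodes are added explicitly so the example does not rely on intermediate-node creation:

# Collect the leaf node names under a sub-tree root.
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.branch1', 'data': []},
    {'name': 'root.branch1.leaf1', 'data': 1},
    {'name': 'root.branch1.leaf2', 'data': 2},
    {'name': 'root.branch2', 'data': []},
])
print(tobj.get_leafs('root'))
# likely ['root.branch1.leaf1', 'root.branch1.leaf2', 'root.branch2'];
# 'root.branch1' is excluded because it has children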
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_node
def get_node(self, name): r""" Get a tree node structure. The structure is a dictionary with the following keys: * **parent** (*NodeName*) Parent node name, :code:`''` if the node is the root node * **children** (*list of NodeName*) Children node names, an empty list if node is a leaf * **data** (*list*) Node data, an empty list if node contains no data :param name: Node name :type name: string :rtype: dictionary :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._db[name]
python
def get_node(self, name): r""" Get a tree node structure. The structure is a dictionary with the following keys: * **parent** (*NodeName*) Parent node name, :code:`''` if the node is the root node * **children** (*list of NodeName*) Children node names, an empty list if node is a leaf * **data** (*list*) Node data, an empty list if node contains no data :param name: Node name :type name: string :rtype: dictionary :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._db[name]
[ "def", "get_node", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "self", ".", "_db", "[", "name", "]" ]
r""" Get a tree node structure. The structure is a dictionary with the following keys: * **parent** (*NodeName*) Parent node name, :code:`''` if the node is the root node * **children** (*list of NodeName*) Children node names, an empty list if node is a leaf * **data** (*list*) Node data, an empty list if node contains no data :param name: Node name :type name: string :rtype: dictionary :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Get", "a", "tree", "node", "structure", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L866-L893
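A brief sketch of the dictionary returned by Trie.get_node; the key names come straight from the docstring above, while the separator and payload values are assumptions for illustration:

# Inspect the raw node structure (parent/children/data keys).
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.branch1', 'data': 'payload'},
])
node = tobj.get_node('root.branch1')
print(node['parent'])    # 'root'
print(node['children'])  # [] -- this node is a leaf
print(node['data'])      # the attached data, kept internally as a list per the docstring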
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_node_children
def get_node_children(self, name): r""" Get the list of children structures of a node. See :py:meth:`ptrie.Trie.get_node` for details about the structure :param name: Parent node name :type name: :ref:`NodeName` :rtype: list :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return [self._db[child] for child in self._db[name]["children"]]
python
def get_node_children(self, name): r""" Get the list of children structures of a node. See :py:meth:`ptrie.Trie.get_node` for details about the structure :param name: Parent node name :type name: :ref:`NodeName` :rtype: list :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return [self._db[child] for child in self._db[name]["children"]]
[ "def", "get_node_children", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "[", "self", ".", "_db", "[", "child", "]", "for", "child", "in", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]", "]" ]
r""" Get the list of children structures of a node. See :py:meth:`ptrie.Trie.get_node` for details about the structure :param name: Parent node name :type name: :ref:`NodeName` :rtype: list :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Get", "the", "list", "of", "children", "structures", "of", "a", "node", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L895-L914
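A sketch contrasting Trie.get_node_children with get_children: it yields the child structures rather than the child names. The tree shape and separator are assumptions for illustration only:

# Iterate over the child structures of the root node.
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.a', 'data': 1},
    {'name': 'root.b', 'data': 2},
])
for child in tobj.get_node_children('root'):
    # each item has the same 'parent'/'children'/'data' keys documented for get_node
    print(child['children'], child['data'])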
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_node_parent
def get_node_parent(self, name): r""" Get the parent structure of a node. See :py:meth:`ptrie.Trie.get_node` for details about the structure :param name: Child node name :type name: :ref:`NodeName` :rtype: dictionary :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._db[self._db[name]["parent"]] if not self.is_root(name) else {}
python
def get_node_parent(self, name): r""" Get the parent structure of a node. See :py:meth:`ptrie.Trie.get_node` for details about the structure :param name: Child node name :type name: :ref:`NodeName` :rtype: dictionary :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._db[self._db[name]["parent"]] if not self.is_root(name) else {}
[ "def", "get_node_parent", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "self", ".", "_db", "[", "self", ".", "_db", "[", "name", "]", "[", "\"parent\"", "]", "]", "if", "not", "self", ".", "is_root", "(", "name", ")", "else", "{", "}" ]
r""" Get the parent structure of a node. See :py:meth:`ptrie.Trie.get_node` for details about the structure :param name: Child node name :type name: :ref:`NodeName` :rtype: dictionary :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Get", "the", "parent", "structure", "of", "a", "node", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L916-L935
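A sketch for Trie.get_node_parent, including the empty-dictionary result for the root node that the code above implies; the separator and node names are assumptions:

# Walk one level up from a child node; the root has no parent structure.
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.child', 'data': 3},
])
print(tobj.get_node_parent('root.child')['children'])  # ['root.child']
print(tobj.get_node_parent('root'))                    # {} -- the root node has no parent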
pmacosta/ptrie
ptrie/ptrie.py
Trie.get_subtree
def get_subtree(self, name): # noqa: D302 r""" Get all node names in a sub-tree. :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example, pprint >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> pprint.pprint(tobj.get_subtree('root.branch1')) ['root.branch1', 'root.branch1.leaf1', 'root.branch1.leaf1.subleaf1', 'root.branch1.leaf2', 'root.branch1.leaf2.subleaf2'] """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._get_subtree(name)
python
def get_subtree(self, name): # noqa: D302 r""" Get all node names in a sub-tree. :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example, pprint >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> pprint.pprint(tobj.get_subtree('root.branch1')) ['root.branch1', 'root.branch1.leaf1', 'root.branch1.leaf1.subleaf1', 'root.branch1.leaf2', 'root.branch1.leaf2.subleaf2'] """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return self._get_subtree(name)
[ "def", "get_subtree", "(", "self", ",", "name", ")", ":", "# noqa: D302", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "self", ".", "_get_subtree", "(", "name", ")" ]
r""" Get all node names in a sub-tree. :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example, pprint >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> pprint.pprint(tobj.get_subtree('root.branch1')) ['root.branch1', 'root.branch1.leaf1', 'root.branch1.leaf1.subleaf1', 'root.branch1.leaf2', 'root.branch1.leaf2.subleaf2']
[ "r", "Get", "all", "node", "names", "in", "a", "sub", "-", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L937-L975
pmacosta/ptrie
ptrie/ptrie.py
Trie.in_tree
def in_tree(self, name): r""" Test if a node is in the tree. :param name: Node name to search for :type name: :ref:`NodeName` :rtype: boolean :raises: RuntimeError (Argument \`name\` is not valid) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") return name in self._db
python
def in_tree(self, name): r""" Test if a node is in the tree. :param name: Node name to search for :type name: :ref:`NodeName` :rtype: boolean :raises: RuntimeError (Argument \`name\` is not valid) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") return name in self._db
[ "def", "in_tree", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "return", "name", "in", "self", ".", "_db" ]
r""" Test if a node is in the tree. :param name: Node name to search for :type name: :ref:`NodeName` :rtype: boolean :raises: RuntimeError (Argument \`name\` is not valid)
[ "r", "Test", "if", "a", "node", "is", "in", "the", "tree", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L996-L1009
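A sketch for Trie.in_tree; per the code above it only raises for malformed names, so a well-formed but absent name simply returns False. The '.' separator is an assumption:

# Membership tests for present and absent nodes.
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.leaf', 'data': []},
])
print(tobj.in_tree('root.leaf'))     # True
print(tobj.in_tree('root.missing'))  # False, with no exception for a valid-looking name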
pmacosta/ptrie
ptrie/ptrie.py
Trie.is_leaf
def is_leaf(self, name): r""" Test if a node is a leaf node (node with no children). :param name: Node name :type name: :ref:`NodeName` :rtype: boolean :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return not self._db[name]["children"]
python
def is_leaf(self, name): r""" Test if a node is a leaf node (node with no children). :param name: Node name :type name: :ref:`NodeName` :rtype: boolean :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) """ if self._validate_node_name(name): raise RuntimeError("Argument `name` is not valid") self._node_in_tree(name) return not self._db[name]["children"]
[ "def", "is_leaf", "(", "self", ",", "name", ")", ":", "if", "self", ".", "_validate_node_name", "(", "name", ")", ":", "raise", "RuntimeError", "(", "\"Argument `name` is not valid\"", ")", "self", ".", "_node_in_tree", "(", "name", ")", "return", "not", "self", ".", "_db", "[", "name", "]", "[", "\"children\"", "]" ]
r""" Test if a node is a leaf node (node with no children). :param name: Node name :type name: :ref:`NodeName` :rtype: boolean :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
[ "r", "Test", "if", "a", "node", "is", "a", "leaf", "node", "(", "node", "with", "no", "children", ")", "." ]
train
https://github.com/pmacosta/ptrie/blob/c176d3ee810b7b5243c7ff2bbf2f1af0b0fff2a8/ptrie/ptrie.py#L1011-L1028
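A closing sketch for Trie.is_leaf, with the separator and node names assumed for illustration; a node is a leaf exactly when its children list is empty:

# Distinguish internal nodes from leaves.
import ptrie

tobj = ptrie.Trie('.')
tobj.add_nodes([
    {'name': 'root', 'data': []},
    {'name': 'root.branch', 'data': []},
    {'name': 'root.branch.leaf', 'data': []},
])
print(tobj.is_leaf('root.branch'))       # False -- it has 'root.branch.leaf' as a child
print(tobj.is_leaf('root.branch.leaf'))  # True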